diff --git a/.build/Gruntfile.js b/.build/Gruntfile.js
index 371610f5d8..d838bc97c9 100644
--- a/.build/Gruntfile.js
+++ b/.build/Gruntfile.js
@@ -28,23 +28,19 @@ module.exports = function(grunt) { // eslint-disable-line xo/filename-case
       dependencies: {
       },
       mainFiles: {
-        'tablesorter': [
+        tablesorter: [
           'dist/js/jquery.tablesorter.combined.js',
           'dist/js/widgets/widget-columnSelector.min.js',
-          'dist/js/widgets/widget-stickyHeaders.min.js',
           'dist/css/theme.blue.min.css'
         ],
-        'bootstrap': [
+        bootstrap: [
           'dist/css/bootstrap.min.css',
           'dist/js/bootstrap.min.js'
         ],
-        'bootstrap-formhelpers': [
-          'dist/js/bootstrap-formhelpers.min.js'
-        ],
-        'isotope': [
+        isotope: [
           'dist/isotope.pkgd.min.js'
         ],
-        'outlayer': [
+        outlayer: [
           'item.js',
           'outlayer.js'
         ]
diff --git a/.build/bower.json b/.build/bower.json
index 4e2b3fcfeb..48887d3cd2 100644
--- a/.build/bower.json
+++ b/.build/bower.json
@@ -10,24 +10,26 @@
   ],
   "dependencies": {
     "jquery": "3.0.0",
-    "bootstrap": "3.3.7",
-    "underscore": "^1.8.3",
-    "bootstrap-hover-dropdown": "2.2.1",
     "jquery-ui": "1.12.1",
+    "bootstrap": "3.3.7",
+    "jqueryui-touch-punch": "*",
+    "jquery-form": "4.0.1",
     "jquery-tokeninput": "^1.7.0",
+    "jquery-confirm": "2.5.2",
+    "jquery-backstretch": "^2.1.15",
     "bootstrap3-typeahead": "4.0.2",
-    "jqueryui-touch-punch": "*",
+    "lodash": "4.17.4",
+    "underscore.string": "^3.3.4",
+    "loglevel": "1.4.1",
+    "timeago": "1.5.4",
     "pnotify": "2.1.0",
     "qtip2": "2.2.1",
-    "tablesorter": "jquery.tablesorter#2.24.5",
-    "jquery-confirm": "2.5.2",
-    "bootstrap-formhelpers": "2.3.0",
+    "tablesorter": "jquery.tablesorter#2.28.8",
     "isotope": "3.0.1",
-    "openSans": "https://google-fonts.azurewebsites.net/googleFonts/openSans?family=Open+Sans:400,300,300italic,400italic,600,600italic,700,700italic,800,800italic",
-    "jquery-backstretch-2": "^2.1.13"
+    "openSans": "https://google-fonts.azurewebsites.net/googleFonts/openSans?family=Open+Sans:400,300,300italic,400italic,600,600italic,700,700italic,800,800italic"
   },
   "resolutions": {
     "bootstrap": "3.3.7",
-    "jquery": "2.1.4"
+    "jquery": "3.0.0"
   }
 }
diff --git a/.build/yarn.lock b/.build/yarn.lock
new file mode 100644
index 0000000000..d8ec364db9
--- /dev/null
+++ b/.build/yarn.lock
@@ -0,0 +1,5249 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
graceful-fs@^4.1.2: + version "4.1.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" + +graceful-fs@~1.1: + version "1.1.14" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-1.1.14.tgz#07078db5f6377f6321fceaaedf497de124dc9465" + +graceful-fs@~1.2.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-1.2.3.tgz#15a4806a57547cb2d2dbf27f42e89a8c3451b364" + +graceful-fs@~2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-2.0.3.tgz#7cd2cdb228a4a3f36e95efa6cc142de7d1a136d0" + +"graceful-readlink@>= 1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" + +grunt-bower-concat@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/grunt-bower-concat/-/grunt-bower-concat-1.0.0.tgz#f430c7b718704c6815215c6ca94d2fd5dd4a7b5b" + dependencies: + async "~1.5.2" + bower "~1.7.7" + detective "~4.3.1" + filesize "~3.2.1" + lodash "~4.3.0" + underscore.string "~3.2.3" + +grunt-bower-task@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/grunt-bower-task/-/grunt-bower-task-0.4.0.tgz#daea0a55682a79a8c79db895b79be6f3ecb65817" + dependencies: + async "~0.1.22" + bower "~1.3.0" + colors "~0.6.0-1" + lodash "~0.10.0" + rimraf "~2.0.2" + wrench "~1.4.3" + +grunt-cli@^0.1.13: + version "0.1.13" + resolved "https://registry.yarnpkg.com/grunt-cli/-/grunt-cli-0.1.13.tgz#e9ebc4047631f5012d922770c39378133cad10f4" + dependencies: + findup-sync "~0.1.0" + nopt "~1.0.10" + resolve "~0.3.1" + +grunt-contrib-clean@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/grunt-contrib-clean/-/grunt-contrib-clean-1.0.0.tgz#6b2ed94117e2c7ffe32ee04578c96fe4625a9b6d" + dependencies: + async "^1.5.2" + rimraf "^2.5.1" + +grunt-contrib-copy@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/grunt-contrib-copy/-/grunt-contrib-copy-0.8.2.tgz#df31c90ffcc409bc9fafe44ec0dd1e4259916fea" + dependencies: + chalk "^1.1.1" + file-sync-cmp "^0.1.0" + +grunt-contrib-cssmin@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/grunt-contrib-cssmin/-/grunt-contrib-cssmin-0.14.0.tgz#88b0a92536969bb566281c5c61ec5062d833f3b7" + dependencies: + chalk "^1.0.0" + clean-css "~3.4.2" + maxmin "^1.1.0" + +grunt-contrib-uglify@^0.11.1: + version "0.11.1" + resolved "https://registry.yarnpkg.com/grunt-contrib-uglify/-/grunt-contrib-uglify-0.11.1.tgz#5e22a2f676cd11d871fc2a0f08aa9b2973045325" + dependencies: + chalk "^1.0.0" + lodash "^4.0.1" + maxmin "^2.0.0" + uglify-js "~2.6.0" + uri-path "^1.0.0" + +grunt-legacy-log-utils@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz#c0706b9dd9064e116f36f23fe4e6b048672c0f7e" + dependencies: + colors "~0.6.2" + lodash "~2.4.1" + underscore.string "~2.3.3" + +grunt-legacy-log@~0.1.0: + version "0.1.3" + resolved "https://registry.yarnpkg.com/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz#ec29426e803021af59029f87d2f9cd7335a05531" + dependencies: + colors "~0.6.2" + grunt-legacy-log-utils "~0.1.1" + hooker "~0.2.3" + lodash "~2.4.1" + underscore.string "~2.3.3" + +grunt-legacy-util@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz#93324884dbf7e37a9ff7c026dff451d94a9e554b" + dependencies: + async "~0.1.22" + exit "~0.1.1" + getobject "~0.1.0" + hooker 
"~0.2.3" + lodash "~0.9.2" + underscore.string "~2.2.1" + which "~1.0.5" + +grunt@^0.4.5: + version "0.4.5" + resolved "https://registry.yarnpkg.com/grunt/-/grunt-0.4.5.tgz#56937cd5194324adff6d207631832a9d6ba4e7f0" + dependencies: + async "~0.1.22" + coffee-script "~1.3.3" + colors "~0.6.2" + dateformat "1.0.2-1.2.3" + eventemitter2 "~0.4.13" + exit "~0.1.1" + findup-sync "~0.1.2" + getobject "~0.1.0" + glob "~3.1.21" + grunt-legacy-log "~0.1.0" + grunt-legacy-util "~0.2.0" + hooker "~0.2.3" + iconv-lite "~0.2.11" + js-yaml "~2.0.5" + lodash "~0.9.2" + minimatch "~0.2.12" + nopt "~1.0.10" + rimraf "~2.2.8" + underscore.string "~2.2.1" + which "~1.0.5" + +gzip-size@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-1.0.0.tgz#66cf8b101047227b95bace6ea1da0c177ed5c22f" + dependencies: + browserify-zlib "^0.1.4" + concat-stream "^1.4.1" + +gzip-size@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-3.0.0.tgz#546188e9bdc337f673772f81660464b389dce520" + dependencies: + duplexer "^0.1.1" + +handlebars@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-2.0.0.tgz#6e9d7f8514a3467fa5e9f82cc158ecfc1d5ac76f" + dependencies: + optimist "~0.3" + optionalDependencies: + uglify-js "~2.3" + +har-schema@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" + +har-validator@~4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" + dependencies: + ajv "^4.9.1" + har-schema "^1.0.5" + +has-ansi@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-0.1.0.tgz#84f265aae8c0e6a88a12d7022894b7568894c62e" + dependencies: + ansi-regex "^0.2.0" + +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + dependencies: + ansi-regex "^2.0.0" + +has-color@~0.1.0: + version "0.1.7" + resolved "https://registry.yarnpkg.com/has-color/-/has-color-0.1.7.tgz#67144a5260c34fc3cca677d041daf52fe7b78b2f" + +has-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + +has-flag@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" + +has@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" + dependencies: + function-bind "^1.0.2" + +hasbin@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/hasbin/-/hasbin-1.2.3.tgz#78c5926893c80215c2b568ae1fd3fcab7a2696b0" + dependencies: + async "~1.5" + +hawk@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-1.1.1.tgz#87cd491f9b46e4e2aeaca335416766885d2d1ed9" + dependencies: + boom "0.4.x" + cryptiles "0.2.x" + hoek "0.9.x" + sntp "0.2.x" + +hawk@~3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" + dependencies: + boom "2.x.x" + cryptiles "2.x.x" + hoek "2.x.x" + sntp "1.x.x" + +hoek@0.9.x: + version "0.9.1" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-0.9.1.tgz#3d322462badf07716ea7eb85baf88079cddce505" + +hoek@2.x.x: + version "2.16.3" + resolved 
"https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" + +hooker@~0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/hooker/-/hooker-0.2.3.tgz#b834f723cc4a242aa65963459df6d984c5d3d959" + +hosted-git-info@^2.1.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.2.0.tgz#7a0d097863d886c0fabbdcd37bf1758d8becf8a5" + +html-tags@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-1.1.1.tgz#869f43859f12d9bdc3892419e494a628aa1b204e" + +html@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/html/-/html-1.0.0.tgz#a544fa9ea5492bfb3a2cca8210a10be7b5af1f61" + dependencies: + concat-stream "^1.4.7" + +htmlencode@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/htmlencode/-/htmlencode-0.0.4.tgz#f7e2d6afbe18a87a78e63ba3308e753766740e3f" + +http-signature@~0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-0.10.1.tgz#4fbdac132559aa8323121e540779c0a012b27e66" + dependencies: + asn1 "0.1.11" + assert-plus "^0.1.5" + ctype "0.5.3" + +http-signature@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" + dependencies: + assert-plus "^0.2.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +http-string-parser@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/http-string-parser/-/http-string-parser-0.0.4.tgz#6b2538e3520d42b349a0ac4b7234e0e39476c5b3" + +http-string-parser@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/http-string-parser/-/http-string-parser-0.0.5.tgz#8f2da0781fe0a6e480343f53d2ecf93af86461c8" + +iconv-lite@~0.2.11: + version "0.2.11" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.2.11.tgz#1ce60a3a57864a292d1321ff4609ca4bb965adc8" + +ignore@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.2.2.tgz#1c51e1ef53bab6ddc15db4d9ac4ec139eceb3410" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + +indent-string@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" + dependencies: + repeating "^2.0.0" + +indexes-of@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" + +infinity-agent@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/infinity-agent/-/infinity-agent-2.0.3.tgz#45e0e2ff7a9eb030b27d62b74b3744b7a7ac4216" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-1.0.2.tgz#ca4309dadee6b54cc0b8d247e8d7c7a0975bdc9b" + +inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + +ini@1.x.x, ini@^1.2.0, ini@^1.3.4, ini@~1.3.0: + version "1.3.4" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" + +inquirer@0.7.1: + version "0.7.1" + resolved 
"https://registry.yarnpkg.com/inquirer/-/inquirer-0.7.1.tgz#b8acf140165bd581862ed1198fb6d26430091fac" + dependencies: + chalk "^0.5.0" + cli-color "~0.3.2" + figures "^1.3.2" + lodash "~2.4.1" + mute-stream "0.0.4" + readline2 "~0.1.0" + rx "^2.2.27" + through "~2.3.4" + +inquirer@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-1.0.3.tgz#ebe3a0948571bcc46ccccbe2f9bcec251e984bd0" + dependencies: + ansi-escapes "^1.1.0" + chalk "^1.0.0" + cli-cursor "^1.0.1" + cli-width "^2.0.0" + figures "^1.3.5" + lodash "^4.3.0" + mute-stream "0.0.6" + pinkie-promise "^2.0.0" + run-async "^2.2.0" + rx "^4.1.0" + string-width "^1.0.1" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.12.0.tgz#1ef2bfd63504df0bc75785fff8c2c41df12f077e" + dependencies: + ansi-escapes "^1.1.0" + ansi-regex "^2.0.0" + chalk "^1.0.0" + cli-cursor "^1.0.1" + cli-width "^2.0.0" + figures "^1.3.5" + lodash "^4.3.0" + readline2 "^1.0.1" + run-async "^0.1.0" + rx-lite "^3.1.2" + string-width "^1.0.1" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.6.0.tgz#614d7bb3e48f9e6a8028e94a0c38f23ef29823d3" + dependencies: + chalk "^0.5.0" + cli-color "~0.3.2" + lodash "~2.4.1" + mute-stream "0.0.4" + readline2 "~0.1.0" + rx "^2.2.27" + through "~2.3.4" + +inquirer@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-1.2.3.tgz#4dec6f32f37ef7bb0b2ed3f1d1a5c3f545074918" + dependencies: + ansi-escapes "^1.1.0" + chalk "^1.0.0" + cli-cursor "^1.0.1" + cli-width "^2.0.0" + external-editor "^1.1.0" + figures "^1.3.5" + lodash "^4.3.0" + mute-stream "0.0.6" + pinkie-promise "^2.0.0" + run-async "^2.2.0" + rx "^4.1.0" + string-width "^1.0.1" + strip-ansi "^3.0.0" + through "^2.3.6" + +insight@0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/insight/-/insight-0.4.3.tgz#76d653c5c0d8048b03cdba6385a6948f74614af0" + dependencies: + async "^0.9.0" + chalk "^0.5.1" + configstore "^0.3.1" + inquirer "^0.6.0" + lodash.debounce "^2.4.1" + object-assign "^1.0.0" + os-name "^1.0.0" + request "^2.40.0" + tough-cookie "^0.12.1" + +interpret@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.3.tgz#cbc35c62eeee73f19ab7b10a801511401afc0f90" + +intersect@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/intersect/-/intersect-0.0.3.tgz#c1a4a5e5eac6ede4af7504cc07e0ada7bc9f4920" + +invert-kv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + +irregular-plurals@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/irregular-plurals/-/irregular-plurals-1.2.0.tgz#38f299834ba8c00c30be9c554e137269752ff3ac" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + +is-buffer@^1.0.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.4.tgz#cfc86ccd5dc5a52fa80489111c6920c457e2d98b" + +is-builtin-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" + dependencies: + builtin-modules "^1.0.0" + +is-dotfile@^1.0.0: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.2.tgz#2c132383f39199f8edc268ca01b9b007d205cc4d" + +is-equal-shallow@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" + dependencies: + is-primitive "^2.0.0" + +is-error@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/is-error/-/is-error-2.2.1.tgz#684a96d84076577c98f4cdb40c6d26a5123bf19c" + +is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + +is-extglob@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" + +is-finite@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + +is-get-set-prop@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-get-set-prop/-/is-get-set-prop-1.0.0.tgz#2731877e4d78a6a69edcce6bb9d68b0779e76312" + dependencies: + get-set-props "^0.1.0" + lowercase-keys "^1.0.0" + +is-glob@^2.0.0, is-glob@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" + dependencies: + is-extglob "^1.0.0" + +is-js-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-js-type/-/is-js-type-2.0.0.tgz#73617006d659b4eb4729bba747d28782df0f7e22" + dependencies: + js-types "^1.0.0" + +is-my-json-valid@^2.10.0: + version "2.15.0" + resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz#936edda3ca3c211fd98f3b2d3e08da43f7b2915b" + dependencies: + generate-function "^2.0.0" + generate-object-property "^1.1.0" + jsonpointer "^4.0.0" + xtend "^4.0.0" + +is-npm@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" + +is-number@^2.0.2, is-number@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" + dependencies: + kind-of "^3.0.2" + +is-obj-prop@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-obj-prop/-/is-obj-prop-1.0.0.tgz#b34de79c450b8d7c73ab2cdf67dc875adb85f80e" + dependencies: + lowercase-keys "^1.0.0" + obj-props "^1.0.0" + +is-obj@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + +is-object@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.1.tgz#8952688c5ec2ffd6b03ecc85e769e02903083470" + +is-path-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" + +is-path-in-cwd@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc" + dependencies: + is-path-inside "^1.0.0" + +is-path-inside@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.0.tgz#fc06e5a1683fbda13de667aff717bbc10a48f37f" + dependencies: + path-is-inside "^1.0.1" + +is-plain-obj@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" + +is-posix-bracket@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" + +is-primitive@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" + +is-promise@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" + +is-property@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84" + +is-proto-prop@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-proto-prop/-/is-proto-prop-1.0.0.tgz#b3951f95c089924fb5d4fcda6542ab3e83e2b220" + dependencies: + lowercase-keys "^1.0.0" + proto-props "^0.2.0" + +is-redirect@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + +is-resolvable@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.0.0.tgz#8df57c61ea2e3c501408d100fb013cf8d6e0cc62" + dependencies: + tryit "^1.0.1" + +is-retry-allowed@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" + +is-root@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-root/-/is-root-1.0.0.tgz#07b6c233bc394cd9d02ba15c966bd6660d6342d5" + +is-stream@^1.0.0, is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + +is-supported-regexp-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-supported-regexp-flag/-/is-supported-regexp-flag-1.0.0.tgz#8b520c85fae7a253382d4b02652e045576e13bb8" + +is-type@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/is-type/-/is-type-0.0.1.tgz#f651d85c365d44955d14a51d8d7061f3f6b4779c" + dependencies: + core-util-is "~1.0.0" + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + +is-utf8@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + +isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + +isexe@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + dependencies: + isarray "1.0.0" + +isstream@0.1.x, isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + +jodid25519@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/jodid25519/-/jodid25519-1.0.2.tgz#06d4912255093419477d425633606e0e90782967" + dependencies: + jsbn "~0.1.0" + +js-base64@^2.1.9: + version "2.1.9" + resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.1.9.tgz#f0e80ae039a4bd654b5f281fc93f04a914a7fcce" + +js-tokens@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.1.tgz#08e9f132484a2c45a30907e9dc4d5567b7f114d7" + +js-types@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/js-types/-/js-types-1.0.0.tgz#d242e6494ed572ad3c92809fc8bed7f7687cbf03" + +js-yaml@^3.1.0, js-yaml@^3.4.2, js-yaml@^3.4.3, js-yaml@^3.4.6, js-yaml@^3.5.1, js-yaml@^3.5.3, js-yaml@^3.8.3: + version "3.8.3" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.8.3.tgz#33a05ec481c850c8875929166fe1beb61c728766" + dependencies: + argparse "^1.0.7" + esprima "^3.1.1" + +js-yaml@~2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-2.0.5.tgz#a25ae6509999e97df278c6719da11bd0687743a8" + dependencies: + argparse "~ 0.1.11" + esprima "~ 1.0.2" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + +json-pointer@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/json-pointer/-/json-pointer-0.5.0.tgz#e0cdd1e561d50608fbc7de810923beb4df11ba0c" + dependencies: + foreach "^2.0.4" + +json-schema-faker@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-faker/-/json-schema-faker-0.4.1.tgz#3cfb15ab1355b88c05b9f4cc98555295dead3cf9" + dependencies: + chance "^1.0.4" + deref "^0.6.4" + faker "3.0.1" + randexp "^0.4.3" + +json-schema-ref-parser@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/json-schema-ref-parser/-/json-schema-ref-parser-1.4.1.tgz#c0c2e438bf0796723b02451bae8bc7dd0b37fed0" + dependencies: + call-me-maybe "^1.0.1" + debug "^2.2.0" + es6-promise "^3.0.2" + js-yaml "^3.4.6" + ono "^2.0.1" + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + +json-stable-stringify@^1.0.0, json-stable-stringify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" + dependencies: + jsonify "~0.0.0" + +json-stringify-safe@~5.0.0, json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + +jsonfilter@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/jsonfilter/-/jsonfilter-1.1.2.tgz#21ef7cedc75193813c75932e96a98be205ba5a11" + dependencies: + JSONStream "^0.8.4" + minimist "^1.1.0" + stream-combiner "^0.2.1" + through2 "^0.6.3" + +jsonify@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" + 
+jsonlint@josdejong/jsonlint: + version "1.6.2" + resolved "https://codeload.github.com/josdejong/jsonlint/tar.gz/85a19d77126771f3177582e3d09c6ffae185d391" + dependencies: + JSV ">= 4.0.x" + nomnom ">= 1.5.x" + +jsonparse@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-0.0.5.tgz#330542ad3f0a654665b778f3eb2d9a9fa507ac64" + +jsonpointer@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.0.1.tgz#4fd92cb34e0e9db3c89c8622ecf51f9b978c6cb9" + +jsprim@^1.2.2: + version "1.3.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.3.1.tgz#2a7256f70412a29ee3670aaca625994c4dcff252" + dependencies: + extsprintf "1.0.2" + json-schema "0.2.3" + verror "1.3.6" + +junk@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/junk/-/junk-1.0.3.tgz#87be63488649cbdca6f53ab39bec9ccd2347f592" + +keypress@0.1.x: + version "0.1.0" + resolved "https://registry.yarnpkg.com/keypress/-/keypress-0.1.0.tgz#4a3188d4291b66b4f65edb99f806aa9ae293592a" + +kind-of@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.1.0.tgz#475d698a5e49ff5e53d14e3e732429dc8bf4cf47" + dependencies: + is-buffer "^1.0.2" + +known-css-properties@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/known-css-properties/-/known-css-properties-0.0.7.tgz#9104343a2adfd8ef3b07bdee7a325e4d44ed9371" + +latest-version@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-0.2.0.tgz#adaf898d5f22380d3f9c45386efdff0a1b5b7501" + dependencies: + package-json "^0.2.0" + +latest-version@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-1.0.1.tgz#72cfc46e3e8d1be651e1ebb54ea9f6ea96f374bb" + dependencies: + package-json "^1.0.0" + +latest-version@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-2.0.0.tgz#56f8d6139620847b8017f8f1f4d78e211324168b" + dependencies: + package-json "^2.0.0" + +lazy-cache@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" + +lazy-req@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/lazy-req/-/lazy-req-1.1.0.tgz#bdaebead30f8d824039ce0ce149d4daa07ba1fac" + +lcid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + dependencies: + invert-kv "^1.0.0" + +ldjson-stream@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ldjson-stream/-/ldjson-stream-1.2.1.tgz#91beceda5ac4ed2b17e649fb777e7abfa0189c2b" + dependencies: + split2 "^0.2.1" + through2 "^0.6.1" + +levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +linkify-it@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-2.0.3.tgz#d94a4648f9b1c179d64fa97291268bdb6ce9434f" + dependencies: + uc.micro "^1.0.1" + +load-grunt-tasks@^3.3.0: + version "3.5.2" + resolved "https://registry.yarnpkg.com/load-grunt-tasks/-/load-grunt-tasks-3.5.2.tgz#0728561180fd20ff8a6927505852fc58aaea0c88" + dependencies: + arrify "^1.0.0" + multimatch "^2.0.0" + pkg-up "^1.0.0" + resolve-pkg "^0.1.0" + +load-json-file@^1.0.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + pinkie-promise "^2.0.0" + strip-bom "^2.0.0" + +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +lockfile@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.3.tgz#2638fc39a0331e9cac1a04b71799931c9c50df79" + +lodash._isnative@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash._isnative/-/lodash._isnative-2.4.1.tgz#3ea6404b784a7be836c7b57580e1cdf79b14832c" + +lodash._objecttypes@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz#7c0b7f69d98a1f76529f890b0cdb1b4dfec11c11" + +lodash.assign@^4.0.3, lodash.assign@^4.0.6: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" + +lodash.camelcase@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + +lodash.clonedeep@^4.3.0, lodash.clonedeep@^4.3.1: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" + +lodash.cond@^4.3.0: + version "4.5.2" + resolved "https://registry.yarnpkg.com/lodash.cond/-/lodash.cond-4.5.2.tgz#f471a1da486be60f6ab955d17115523dd1d255d5" + +lodash.debounce@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-2.4.1.tgz#d8cead246ec4b926e8b85678fc396bfeba8cc6fc" + dependencies: + lodash.isfunction "~2.4.1" + lodash.isobject "~2.4.1" + lodash.now "~2.4.1" + +lodash.defaults@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" + +lodash.defaultsdeep@^4.3.1: + version "4.6.0" + resolved "https://registry.yarnpkg.com/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.0.tgz#bec1024f85b1bd96cbea405b23c14ad6443a6f81" + +lodash.get@^4.1.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + +lodash.isequal@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" + +lodash.isfunction@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz#2cfd575c73e498ab57e319b77fa02adef13a94d1" + +lodash.isobject@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.isobject/-/lodash.isobject-2.4.1.tgz#5a2e47fe69953f1ee631a7eba1fe64d2d06558f5" + dependencies: + lodash._objecttypes "~2.4.1" + +lodash.kebabcase@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" + +lodash.mergewith@^4.3.1: + version "4.6.0" + resolved 
"https://registry.yarnpkg.com/lodash.mergewith/-/lodash.mergewith-4.6.0.tgz#150cf0a16791f5903b8891eab154609274bdea55" + +lodash.now@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.now/-/lodash.now-2.4.1.tgz#6872156500525185faf96785bb7fe7fe15b562c6" + dependencies: + lodash._isnative "~2.4.1" + +lodash.snakecase@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" + +lodash.upperfirst@^4.2.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz#1365edf431480481ef0d1c68957a5ed99d49f7ce" + +lodash@^4.0.0, lodash@^4.0.1, lodash@^4.1.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.14.2, lodash@^4.15.0, lodash@^4.17.4, lodash@^4.3.0: + version "4.17.4" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" + +lodash@~0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-0.10.0.tgz#5254bbc2c46c827f535a27d631fd4f2bff374ce7" + +lodash@~0.9.2: + version "0.9.2" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-0.9.2.tgz#8f3499c5245d346d682e5b0d3b40767e09f1a92c" + +lodash@~2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-2.4.2.tgz#fadd834b9683073da179b3eae6d9c0d15053f73e" + +lodash@~4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.3.0.tgz#efd9c4a6ec53f3b05412429915c3e4824e4d25a4" + +log-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" + dependencies: + chalk "^1.0.0" + +longest@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" + +loud-rejection@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" + dependencies: + currently-unhandled "^0.4.1" + signal-exit "^3.0.0" + +lowercase-keys@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" + +lru-cache@2, lru-cache@~2.5.0: + version "2.5.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.5.2.tgz#1fddad938aae1263ce138680be1b3f591c0ab41c" + +lru-cache@^4.0.0, lru-cache@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e" + dependencies: + pseudomap "^1.0.1" + yallist "^2.0.0" + +lru-cache@~2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.3.1.tgz#b3adf6b3d856e954e2c390e6cef22081245a53d6" + +lru-queue@0.1: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + dependencies: + es5-ext "~0.10.2" + +map-obj@^1.0.0, map-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" + +markdown-it@^8.3.1: + version "8.3.1" + resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-8.3.1.tgz#2f4b622948ccdc193d66f3ca2d43125ac4ac7323" + dependencies: + argparse "^1.0.7" + entities "~1.1.1" + linkify-it "^2.0.0" + mdurl "^1.0.1" + uc.micro "^1.0.3" + +maxmin@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/maxmin/-/maxmin-1.1.0.tgz#71365e84a99dd8f8b3f7d5fde2f00d1e7f73be61" + dependencies: + chalk "^1.0.0" + figures "^1.0.1" + gzip-size "^1.0.0" + pretty-bytes "^1.0.0" + +maxmin@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/maxmin/-/maxmin-2.1.0.tgz#4d3b220903d95eee7eb7ac7fa864e72dc09a3166" + dependencies: + chalk "^1.0.0" + figures "^1.0.1" + gzip-size "^3.0.0" + pretty-bytes "^3.0.0" + +mdurl@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e" + +media-typer@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + +memoizee@~0.3.8: + version "0.3.10" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.3.10.tgz#4eca0d8aed39ec9d017f4c5c2f2f6432f42e5c8f" + dependencies: + d "~0.1.1" + es5-ext "~0.10.11" + es6-weak-map "~0.1.4" + event-emitter "~0.3.4" + lru-queue "0.1" + next-tick "~0.2.2" + timers-ext "0.1" + +meow@^3.1.0, meow@^3.3.0, meow@^3.4.2: + version "3.7.0" + resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" + dependencies: + camelcase-keys "^2.0.0" + decamelize "^1.1.2" + loud-rejection "^1.0.0" + map-obj "^1.0.1" + minimist "^1.1.3" + normalize-package-data "^2.3.4" + object-assign "^4.0.1" + read-pkg-up "^1.0.1" + redent "^1.0.0" + trim-newlines "^1.0.0" + +merge-descriptors@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + +micromatch@^2.3.11: + version "2.3.11" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" + dependencies: + arr-diff "^2.0.0" + array-unique "^0.2.1" + braces "^1.8.2" + expand-brackets "^0.1.4" + extglob "^0.3.1" + filename-regex "^2.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.1" + kind-of "^3.0.2" + normalize-path "^2.0.1" + object.omit "^2.0.0" + parse-glob "^3.0.4" + regex-cache "^0.4.2" + +mime-db@~1.12.0: + version "1.12.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.12.0.tgz#3d0c63180f458eb10d325aaa37d7c58ae312e9d7" + +mime-db@~1.26.0: + version "1.26.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.26.0.tgz#eaffcd0e4fc6935cf8134da246e2e6c35305adff" + +mime-types@^2.1.12, mime-types@~2.1.7: + version "2.1.14" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.14.tgz#f7ef7d97583fcaf3b7d282b6f8b5679dab1e94ee" + dependencies: + mime-db "~1.26.0" + +mime-types@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-1.0.2.tgz#995ae1392ab8affcbfcb2641dd054e943c0d5dce" + +mime-types@~2.0.3: + version "2.0.14" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.0.14.tgz#310e159db23e077f8bb22b748dabfa4957140aa6" + dependencies: + mime-db "~1.12.0" + +mime@~1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.2.11.tgz#58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10" + +minim-api-description@^0.1.4: + version "0.1.8" + resolved "https://registry.yarnpkg.com/minim-api-description/-/minim-api-description-0.1.8.tgz#e360ff1cdedb76019e097769de7c89d20bbbca18" + dependencies: + babel-runtime "^5.8.20" + +minim-parse-result@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/minim-parse-result/-/minim-parse-result-0.2.2.tgz#61ec3929aea02f73835c102c0055eed50f9b1249" + dependencies: + 
babel-runtime "^5.8.20" + minim-api-description "^0.1.4" + +minim@^0.14.0: + version "0.14.2" + resolved "https://registry.yarnpkg.com/minim/-/minim-0.14.2.tgz#4ce30b0d9db2522f71bd9c361bd3424738127718" + dependencies: + lodash "^4.15.0" + uptown "^0.4.1" + +minimatch@0.3: + version "0.3.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.3.0.tgz#275d8edaac4f1bb3326472089e7949c8394699dd" + dependencies: + lru-cache "2" + sigmund "~1.0.0" + +minimatch@3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.2.tgz#0f398a7300ea441e9c348c83d98ab8c9dbf9c40a" + dependencies: + brace-expansion "^1.0.0" + +minimatch@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-1.0.0.tgz#e0dd2120b49e1b724ce8d714c520822a9438576d" + dependencies: + lru-cache "2" + sigmund "~1.0.0" + +minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.3.tgz#2a4e4090b96b2db06a9d7df01055a62a77c9b774" + dependencies: + brace-expansion "^1.0.0" + +minimatch@~0.2.11, minimatch@~0.2.12: + version "0.2.14" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.2.14.tgz#c74e780574f63c6f9a090e90efbe6ef53a6a756a" + dependencies: + lru-cache "2" + sigmund "~1.0.0" + +minimist@0.0.8, minimist@~0.0.1: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + +minimist@^1.1.0, minimist@^1.1.3, minimist@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + +mkdirp@0.5.0, "mkdirp@>=0.5 0", mkdirp@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.0.tgz#1d73076a6df986cd9344e15e71fcc05a4c9abf12" + dependencies: + minimist "0.0.8" + +mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + dependencies: + minimist "0.0.8" + +mkdirp@~0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.5.tgz#de3e5f8961c88c787ee1368df849ac4413eca8d7" + +mkpath@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/mkpath/-/mkpath-0.1.0.tgz#7554a6f8d871834cc97b5462b122c4c124d6de91" + +module-not-found-error@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/module-not-found-error/-/module-not-found-error-1.0.1.tgz#cf8b4ff4f29640674d6cdd02b0e3bc523c2bbdc0" + +mout@~0.9.0: + version "0.9.1" + resolved "https://registry.yarnpkg.com/mout/-/mout-0.9.1.tgz#84f0f3fd6acc7317f63de2affdcc0cee009b0477" + +ms@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098" + +ms@0.7.2: + version "0.7.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" + +multimatch@^2.0.0, multimatch@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-2.1.0.tgz#9c7906a22fb4c02919e2f5f75161b4cdbd4b2a2b" + dependencies: + array-differ "^1.0.0" + array-union "^1.0.1" + arrify "^1.0.0" + minimatch "^3.0.0" + +mute-stream@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.4.tgz#a9219960a6d5d5d046597aee51252c6655f7177e" + +mute-stream@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.5.tgz#8fbfabb0a98a253d3184331f9e8deb7372fac6c0" + 
+mute-stream@0.0.6, mute-stream@~0.0.4: + version "0.0.6" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.6.tgz#48962b19e169fd1dfc240b3f1e7317627bbc47db" + +nan@~2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.2.1.tgz#d68693f6b34bb41d66bc68b3a4f9defc79d7149b" + +natives@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.0.tgz#e9ff841418a6b2ec7a495e939984f78f163e6e31" + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + +nconf@^0.7.2: + version "0.7.2" + resolved "https://registry.yarnpkg.com/nconf/-/nconf-0.7.2.tgz#a05fdf22dc01c378dd5c4df27f2dc90b9aa8bb00" + dependencies: + async "~0.9.0" + ini "1.x.x" + yargs "~3.15.0" + +nested-error-stacks@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nested-error-stacks/-/nested-error-stacks-1.0.2.tgz#19f619591519f096769a5ba9a86e6eeec823c3cf" + dependencies: + inherits "~2.0.1" + +next-tick@~0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-0.2.2.tgz#75da4a927ee5887e39065880065b7336413b310d" + +node-status-codes@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-status-codes/-/node-status-codes-1.0.0.tgz#5ae5541d024645d32a58fcddc9ceecea7ae3ac2f" + +node-uuid@~1.4.0: + version "1.4.7" + resolved "https://registry.yarnpkg.com/node-uuid/-/node-uuid-1.4.7.tgz#6da5a17668c4b3dd59623bda11cf7fa4c1f60a6f" + +"nomnom@>= 1.5.x": + version "1.8.1" + resolved "https://registry.yarnpkg.com/nomnom/-/nomnom-1.8.1.tgz#2151f722472ba79e50a76fc125bb8c8f2e4dc2a7" + dependencies: + chalk "~0.4.0" + underscore "~1.6.0" + +nopt@~1.0.10: + version "1.0.10" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" + dependencies: + abbrev "1" + +nopt@~2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-2.2.1.tgz#2aa09b7d1768487b3b89a9c5aa52335bff0baea7" + dependencies: + abbrev "1" + +nopt@~3.0.0, nopt@~3.0.1: + version "3.0.6" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" + dependencies: + abbrev "1" + +normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: + version "2.3.5" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.3.5.tgz#8d924f142960e1777e7ffe170543631cc7cb02df" + dependencies: + hosted-git-info "^2.1.4" + is-builtin-module "^1.0.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.0.1.tgz#47886ac1662760d4261b7d979d241709d3ce3f7a" + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + +normalize-selector@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/normalize-selector/-/normalize-selector-0.2.0.tgz#d0b145eb691189c63a78d201dc4fdb1293ef0c03" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + dependencies: + path-key "^2.0.0" + +npmconf@^2.0.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/npmconf/-/npmconf-2.1.2.tgz#66606a4a736f1e77a059aa071a79c94ab781853a" + dependencies: + config-chain "~1.1.8" + inherits 
"~2.0.0" + ini "^1.2.0" + mkdirp "^0.5.0" + nopt "~3.0.1" + once "~1.3.0" + osenv "^0.1.0" + semver "2 || 3 || 4" + uid-number "0.0.5" + +num2fraction@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + +oauth-sign@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.4.0.tgz#f22956f31ea7151a821e5f2fb32c113cad8b9f69" + +oauth-sign@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.5.0.tgz#d767f5169325620eab2e087ef0c472e773db6461" + +oauth-sign@~0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" + +obj-props@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/obj-props/-/obj-props-1.1.0.tgz#626313faa442befd4a44e9a02c3cb6bde937b511" + +object-assign@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-0.3.1.tgz#060e2a2a27d7c0d77ec77b78f11aa47fd88008d2" + +object-assign@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-1.0.0.tgz#e65dc8766d3b47b4b8307465c8311da030b070a6" + +object-assign@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-2.1.1.tgz#43c36e5d569ff8e4816c4efa8be02d26967c18aa" + +object-assign@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-3.0.0.tgz#9bedd5ca0897949bca47e7ff408062d549f587f2" + +object-assign@^4.0.1, object-assign@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + +object.omit@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" + dependencies: + for-own "^0.1.4" + is-extendable "^0.1.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + dependencies: + wrappy "1" + +once@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.2.0.tgz#de1905c636af874a8fba862d9aabddd1f920461c" + +once@~1.3.0: + version "1.3.3" + resolved "https://registry.yarnpkg.com/once/-/once-1.3.3.tgz#b2e261557ce4c314ec8304f3fa82663e4297ca20" + dependencies: + wrappy "1" + +onecolor@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/onecolor/-/onecolor-3.0.4.tgz#75a46f80da6c7aaa5b4daae17a47198bd9652494" + +onetime@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-1.1.0.tgz#a1f7838f8314c516f05ecefcbc4ccfe04b4ed789" + +ono@^2.0.1: + version "2.2.4" + resolved "https://registry.yarnpkg.com/ono/-/ono-2.2.4.tgz#f6c1d9ea64da07a54863986535da3de67e502696" + +open@^0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/open/-/open-0.0.5.tgz#42c3e18ec95466b6bf0dc42f3a2945c3f0cad8fc" + +opn@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/opn/-/opn-1.0.2.tgz#b909643346d00a1abc977a8b96f3ce3c53d5cf5f" + +optimist@^0.6.1, optimist@~0.6.0: + version "0.6.1" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" + dependencies: + minimist "~0.0.1" + 
wordwrap "~0.0.2" + +optimist@~0.3, optimist@~0.3.5: + version "0.3.7" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.3.7.tgz#c90941ad59e4273328923074d2cf2e7cbc6ec0d9" + dependencies: + wordwrap "~0.0.2" + +optionator@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.4" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + wordwrap "~1.0.0" + +os-homedir@^1.0.0, os-homedir@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + +os-locale@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" + dependencies: + lcid "^1.0.0" + +os-name@^1.0.0, os-name@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/os-name/-/os-name-1.0.3.tgz#1b379f64835af7c5a7f498b357cb95215c159edf" + dependencies: + osx-release "^1.0.0" + win-release "^1.0.0" + +os-shim@^0.1.2: + version "0.1.3" + resolved "https://registry.yarnpkg.com/os-shim/-/os-shim-0.1.3.tgz#6b62c3791cf7909ea35ed46e17658bb417cb3917" + +os-tmpdir@^1.0.0, os-tmpdir@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + +osenv@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.0.3.tgz#cd6ad8ddb290915ad9e22765576025d411f29cb6" + +osenv@0.1.0, osenv@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.0.tgz#61668121eec584955030b9f470b1d2309504bfcb" + +osx-release@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/osx-release/-/osx-release-1.1.0.tgz#f217911a28136949af1bf9308b241e2737d3cd6c" + dependencies: + minimist "^1.1.0" + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + +p-limit@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + dependencies: + p-limit "^1.1.0" + +p-throttler@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/p-throttler/-/p-throttler-0.1.0.tgz#1b16907942c333e6f1ddeabcb3479204b8c417c4" + dependencies: + q "~0.9.2" + +package-json@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-0.2.0.tgz#0316e177b8eb149985d34f706b4a5543b274bec5" + dependencies: + got "^0.3.0" + registry-url "^0.1.0" + +package-json@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-1.2.0.tgz#c8ecac094227cdf76a316874ed05e27cc939a0e0" + dependencies: + got "^3.2.0" + registry-url "^3.0.0" + +package-json@^2.0.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-2.4.0.tgz#0d15bd67d1cbbddbb2ca222ff2edb86bcb31a8bb" + dependencies: + got "^5.0.0" + registry-auth-token "^3.0.1" + registry-url "^3.0.3" + semver "^5.1.0" + +pako@~0.2.0: + version "0.2.9" + resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" + +parse-gitignore@^0.3.1: + version "0.3.1" + resolved 
"https://registry.yarnpkg.com/parse-gitignore/-/parse-gitignore-0.3.1.tgz#09adda265a4a5be2ce5e905b95a02f7f0e0044fa" + dependencies: + array-unique "^0.2.1" + is-glob "^2.0.1" + +parse-glob@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" + dependencies: + glob-base "^0.3.0" + is-dotfile "^1.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.0" + +parse-json@^2.1.0, parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + dependencies: + error-ex "^1.2.0" + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + +path-is-inside@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + +path-key@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + +path-type@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + dependencies: + graceful-fs "^4.1.2" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + dependencies: + pify "^2.0.0" + +pct-encode@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pct-encode/-/pct-encode-1.0.2.tgz#b99b7b044d6bd7c39e4839a7a80122ad7515caa5" + +performance-now@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" + +pidusage@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pidusage/-/pidusage-0.1.1.tgz#7c8a2900538931ebbe1a62a9308e77b37b36a530" + +pify@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + +pipetteur@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pipetteur/-/pipetteur-2.0.3.tgz#1955760959e8d1a11cb2a50ec83eec470633e49f" + dependencies: + onecolor "^3.0.4" + synesthesia "^1.0.1" + +pitboss-ng@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/pitboss-ng/-/pitboss-ng-0.3.2.tgz#e730e97ac5d9a594ff03d3b736d3e58f795cd534" + dependencies: + clone "^1.0.2" + csv "^0.4.2" + pidusage "^0.1.1" + +pkg-conf@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.0.0.tgz#071c87650403bccfb9c627f58751bfe47c067279" + dependencies: + find-up "^2.0.0" + load-json-file "^2.0.0" + 
+pkg-dir@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4" + dependencies: + find-up "^1.0.0" + +pkg-up@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-1.0.0.tgz#3e08fb461525c4421624a33b9f7e6d0af5b05a26" + dependencies: + find-up "^1.0.0" + +plur@^2.0.0, plur@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/plur/-/plur-2.1.2.tgz#7482452c1a0f508e3e344eaec312c91c29dc655a" + dependencies: + irregular-plurals "^1.0.0" + +pluralize@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" + +postcss-less@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/postcss-less/-/postcss-less-0.14.0.tgz#c631b089c6cce422b9a10f3a958d2bedd3819324" + dependencies: + postcss "^5.0.21" + +postcss-media-query-parser@^0.2.0: + version "0.2.3" + resolved "https://registry.yarnpkg.com/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz#27b39c6f4d94f81b1a73b8f76351c609e5cef244" + +postcss-reporter@^1.2.1, postcss-reporter@^1.3.3: + version "1.4.1" + resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-1.4.1.tgz#c136f0a5b161915f379dd3765c61075f7e7b9af2" + dependencies: + chalk "^1.0.0" + lodash "^4.1.0" + log-symbols "^1.0.2" + postcss "^5.0.0" + +postcss-reporter@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-3.0.0.tgz#09ea0f37a444c5693878606e09b018ebeff7cf8f" + dependencies: + chalk "^1.0.0" + lodash "^4.1.0" + log-symbols "^1.0.2" + postcss "^5.0.0" + +postcss-resolve-nested-selector@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.1.tgz#29ccbc7c37dedfac304e9fff0bf1596b3f6a0e4e" + +postcss-scss@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/postcss-scss/-/postcss-scss-0.4.1.tgz#ad771b81f0f72f5f4845d08aa60f93557653d54c" + dependencies: + postcss "^5.2.13" + +postcss-selector-parser@^2.0.0, postcss-selector-parser@^2.1.1: + version "2.2.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-2.2.2.tgz#3d70f5adda130da51c7c0c2fc023f56b1374fe08" + dependencies: + flatten "^1.0.2" + indexes-of "^1.0.1" + uniq "^1.0.1" + +postcss-value-parser@^3.1.1, postcss-value-parser@^3.2.3: + version "3.3.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz#87f38f9f18f774a4ab4c8a232f5c5ce8872a9d15" + +postcss@^5.0.0, postcss@^5.0.18, postcss@^5.0.20, postcss@^5.0.21, postcss@^5.0.4, postcss@^5.0.8, postcss@^5.2.13, postcss@^5.2.4: + version "5.2.13" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-5.2.13.tgz#1be52a32cf2ef58c0d75f1aedb3beabcf257cef3" + dependencies: + chalk "^1.1.3" + js-base64 "^2.1.9" + source-map "^0.5.6" + supports-color "^3.2.3" + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + +prepend-http@^1.0.0, prepend-http@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" + +preserve@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" + +pretty-bytes@^1.0.0: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-1.0.4.tgz#0a22e8210609ad35542f8c8d5d2159aff0751c84" + dependencies: + get-stdin "^4.0.1" + meow "^3.1.0" + +pretty-bytes@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-3.0.1.tgz#27d0008d778063a0b4811bb35c79f1bd5d5fbccf" + dependencies: + number-is-nan "^1.0.0" + +process-nextick-args@~1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" + +progress@^1.1.8: + version "1.1.8" + resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be" + +"promise@>=3.2 <8": + version "7.1.1" + resolved "https://registry.yarnpkg.com/promise/-/promise-7.1.1.tgz#489654c692616b8aa55b0724fa809bb7db49c5bf" + dependencies: + asap "~2.0.3" + +promptly@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/promptly/-/promptly-0.2.0.tgz#73ef200fa8329d5d3a8df41798950b8646ca46d9" + dependencies: + read "~1.0.4" + +protagonist@^1.6.0: + version "1.6.7" + resolved "https://registry.yarnpkg.com/protagonist/-/protagonist-1.6.7.tgz#142717f6ee7de8235f0ec07d4313bae81add0b04" + dependencies: + nan "~2.2.1" + +proto-list@~1.2.1: + version "1.2.4" + resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" + +proto-props@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/proto-props/-/proto-props-0.2.1.tgz#5e01dc2675a0de9abfa76e799dfa334d6f483f4b" + +proxyquire@^1.7.10: + version "1.7.11" + resolved "https://registry.yarnpkg.com/proxyquire/-/proxyquire-1.7.11.tgz#13b494eb1e71fb21cc3ebe3699e637d3bec1af9e" + dependencies: + fill-keys "^1.0.2" + module-not-found-error "^1.0.0" + resolve "~1.1.7" + +pseudomap@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + +pump@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/pump/-/pump-0.3.5.tgz#ae5ff8c1f93ed87adc6530a97565b126f585454b" + dependencies: + end-of-stream "~1.0.0" + once "~1.2.0" + +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + +punycode@>=0.2.0, punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + +q@~0.9.2: + version "0.9.7" + resolved "https://registry.yarnpkg.com/q/-/q-0.9.7.tgz#4de2e6cb3b29088c9e4cbc03bf9d42fb96ce2f75" + +q@~1.0.0, q@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/q/-/q-1.0.1.tgz#11872aeedee89268110b10a718448ffb10112a14" + +qs@~1.2.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-1.2.2.tgz#19b57ff24dc2a99ce1f8bdf6afcda59f8ef61f88" + +qs@~2.3.1: + version "2.3.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-2.3.3.tgz#e9e85adbe75da0bbe4c8e0476a086290f863b404" + +qs@~6.4.0: + version "6.4.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + +randexp@^0.4.3: + version "0.4.5" + resolved "https://registry.yarnpkg.com/randexp/-/randexp-0.4.5.tgz#ffe3a80c3f666cd71e6b008e477e584c1a32ff3e" + dependencies: + discontinuous-range "1.0.0" + ret "~0.1.10" + 
+randomatic@^1.1.3: + version "1.1.6" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.6.tgz#110dcabff397e9dcff7c0789ccc0a49adf1ec5bb" + dependencies: + is-number "^2.0.2" + kind-of "^3.0.2" + +rc@^1.0.1, rc@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.1.6.tgz#43651b76b6ae53b5c802f1151fa3fc3b059969c9" + dependencies: + deep-extend "~0.4.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~1.0.4" + +read-all-stream@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/read-all-stream/-/read-all-stream-3.1.0.tgz#35c3e177f2078ef789ee4bfafa4373074eaef4fa" + dependencies: + pinkie-promise "^2.0.0" + readable-stream "^2.0.0" + +read-file-stdin@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/read-file-stdin/-/read-file-stdin-0.2.1.tgz#25eccff3a153b6809afacb23ee15387db9e0ee61" + dependencies: + gather-stream "^1.0.0" + +read-pkg-up@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + dependencies: + find-up "^1.0.0" + read-pkg "^1.0.0" + +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + +read-pkg@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + dependencies: + load-json-file "^1.0.0" + normalize-package-data "^2.3.2" + path-type "^1.0.0" + +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + dependencies: + load-json-file "^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + +read@~1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" + dependencies: + mute-stream "~0.0.4" + +"readable-stream@>=1.0.33-1 <1.1.0-0", readable-stream@~1.0.26: + version "1.0.34" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@^1.0.27-1, readable-stream@^1.0.33, readable-stream@~1.1.8, readable-stream@~1.1.9: + version "1.1.14" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.2.tgz#a9e6fec3c7dda85f8bb1b3ba7028604556fc825e" + dependencies: + buffer-shims "^1.0.0" + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "~1.0.0" + process-nextick-args "~1.0.6" + string_decoder "~0.10.x" + util-deprecate "~1.0.1" + +readline2@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/readline2/-/readline2-1.0.1.tgz#41059608ffc154757b715d9989d199ffbf372e35" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + mute-stream "0.0.5" + +readline2@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/readline2/-/readline2-0.1.1.tgz#99443ba6e83b830ef3051bfd7dc241a82728d568" + dependencies: + 
mute-stream "0.0.4" + strip-ansi "^2.0.1" + +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + dependencies: + resolve "^1.1.6" + +redent@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" + dependencies: + indent-string "^2.1.0" + strip-indent "^1.0.1" + +redeyed@~0.4.0: + version "0.4.4" + resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-0.4.4.tgz#37e990a6f2b21b2a11c2e6a48fd4135698cba97f" + dependencies: + esprima "~1.0.4" + +regex-cache@^0.4.2: + version "0.4.3" + resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.3.tgz#9b1a6c35d4d0dfcef5711ae651e8e9d3d7114145" + dependencies: + is-equal-shallow "^0.1.3" + is-primitive "^2.0.0" + +registry-auth-token@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.1.0.tgz#997c08256e0c7999837b90e944db39d8a790276b" + dependencies: + rc "^1.1.6" + +registry-url@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-0.1.1.tgz#1739427b81b110b302482a1c7cd727ffcc82d5be" + dependencies: + npmconf "^2.0.1" + +registry-url@^3.0.0, registry-url@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" + dependencies: + rc "^1.0.1" + +repeat-element@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" + +repeat-string@^1.5.2: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + +repeating@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-1.1.3.tgz#3d4114218877537494f97f77f9785fab810fa4ac" + dependencies: + is-finite "^1.0.0" + +repeating@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" + dependencies: + is-finite "^1.0.0" + +req-all@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/req-all/-/req-all-0.1.0.tgz#130051e2ace58a02eacbfc9d448577a736a9273a" + +request-progress@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/request-progress/-/request-progress-0.3.0.tgz#bdf2062bfc197c5d492500d44cb3aff7865b492e" + dependencies: + throttleit "~0.0.2" + +request-replay@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/request-replay/-/request-replay-0.2.0.tgz#9b693a5d118b39f5c596ead5ed91a26444057f60" + dependencies: + retry "~0.6.0" + +request@^2.40.0, request@^2.74.0, request@^2.81.0: + version "2.81.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" + dependencies: + aws-sign2 "~0.6.0" + aws4 "^1.2.1" + caseless "~0.12.0" + combined-stream "~1.0.5" + extend "~3.0.0" + forever-agent "~0.6.1" + form-data "~2.1.1" + har-validator "~4.2.1" + hawk "~3.1.3" + http-signature "~1.1.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.7" + oauth-sign "~0.8.1" + performance-now "^0.2.0" + qs "~6.4.0" + safe-buffer "^5.0.1" + stringstream "~0.0.4" + tough-cookie "~2.3.0" + tunnel-agent "^0.6.0" + uuid "^3.0.0" + +request@~2.42.0: + version "2.42.0" + resolved 
"https://registry.yarnpkg.com/request/-/request-2.42.0.tgz#572bd0148938564040ac7ab148b96423a063304a" + dependencies: + bl "~0.9.0" + caseless "~0.6.0" + forever-agent "~0.5.0" + json-stringify-safe "~5.0.0" + mime-types "~1.0.1" + node-uuid "~1.4.0" + qs "~1.2.0" + tunnel-agent "~0.4.0" + optionalDependencies: + aws-sign2 "~0.5.0" + form-data "~0.1.0" + hawk "1.1.1" + http-signature "~0.10.0" + oauth-sign "~0.4.0" + stringstream "~0.0.4" + tough-cookie ">=0.12.0" + +request@~2.51.0: + version "2.51.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.51.0.tgz#35d00bbecc012e55f907b1bd9e0dbd577bfef26e" + dependencies: + aws-sign2 "~0.5.0" + bl "~0.9.0" + caseless "~0.8.0" + combined-stream "~0.0.5" + forever-agent "~0.5.0" + form-data "~0.2.0" + hawk "1.1.1" + http-signature "~0.10.0" + json-stringify-safe "~5.0.0" + mime-types "~1.0.1" + node-uuid "~1.4.0" + oauth-sign "~0.5.0" + qs "~2.3.1" + stringstream "~0.0.4" + tough-cookie ">=0.12.0" + tunnel-agent "~0.4.0" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + +require-from-string@^1.1.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-1.2.1.tgz#529c9ccef27380adfec9a2f965b649bbee636418" + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + +require-uncached@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" + dependencies: + caller-path "^0.1.0" + resolve-from "^1.0.0" + +resolve-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-1.0.0.tgz#4eaeea41ed040d1702457df64a42b2b07d246f9f" + dependencies: + resolve-from "^2.0.0" + +resolve-from@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" + +resolve-from@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-2.0.0.tgz#9480ab20e94ffa1d9e80a804c7ea147611966b57" + +resolve-pkg@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/resolve-pkg/-/resolve-pkg-0.1.0.tgz#02cc993410e2936962bd97166a1b077da9725531" + dependencies: + resolve-from "^2.0.0" + +resolve@^1.1.6, resolve@~1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + +resolve@~0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-0.3.1.tgz#34c63447c664c70598d1c9b126fc43b2a24310a4" + +restore-cursor@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-1.0.1.tgz#34661f46886327fed2991479152252df92daa541" + dependencies: + exit-hook "^1.0.0" + onetime "^1.0.0" + +ret@~0.1.10: + version "0.1.14" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.14.tgz#58c636837b12e161f8a380cf081c6a230fd1664e" + +retry@0.6.0, retry@~0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.6.0.tgz#1c010713279a6fd1e8def28af0c3ff1871caa537" + +right-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" + dependencies: + align-text "^0.1.1" + +rimraf@2, 
rimraf@^2.2.8, rimraf@^2.5.1: + version "2.5.4" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.5.4.tgz#96800093cbf1a0c86bd95b4625467535c29dfa04" + dependencies: + glob "^7.0.5" + +rimraf@~2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.0.3.tgz#f50a2965e7144e9afd998982f15df706730f56a9" + optionalDependencies: + graceful-fs "~1.1" + +rimraf@~2.2.0, rimraf@~2.2.8: + version "2.2.8" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" + +run-async@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389" + dependencies: + once "^1.3.0" + +run-async@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" + dependencies: + is-promise "^2.1.0" + +rx-lite@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102" + +rx@^2.2.27: + version "2.5.3" + resolved "https://registry.yarnpkg.com/rx/-/rx-2.5.3.tgz#21adc7d80f02002af50dae97fd9dbf248755f566" + +rx@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" + +safe-buffer@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" + +semver-diff@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-0.1.0.tgz#4f6057ca3eba23cc484b51f64aaf88b131a3855d" + dependencies: + semver "^2.2.1" + +semver-diff@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" + dependencies: + semver "^5.0.3" + +"semver@2 || 3 || 4", semver@^2.2.1, semver@~2.3.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52" + +"semver@2 || 3 || 4 || 5", semver@^5.0.1, semver@^5.0.3, semver@^5.1.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + +shell-quote@~1.4.1: + version "1.4.3" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.4.3.tgz#952c44e0b1ed9013ef53958179cc643e8777466b" + dependencies: + array-filter "~0.0.0" + array-map "~0.0.0" + array-reduce "~0.0.0" + jsonify "~0.0.0" + +shelljs@^0.7.5: + version "0.7.7" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.7.tgz#b2f5c77ef97148f4b4f6e22682e10bba8667cff1" + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + +sift@^3.2.1: + version "3.3.2" + resolved "https://registry.yarnpkg.com/sift/-/sift-3.3.2.tgz#e781e35166867683ddc28f6f8a7591989d92d7ee" + +sigmund@~1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/sigmund/-/sigmund-1.0.1.tgz#3ff21f198cad2175f9f3b781853fd94d0d19b590" + +signal-exit@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + +slice-ansi@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" + +slide@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" + +sntp@0.2.x: + version "0.2.4" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-0.2.4.tgz#fb885f18b0f3aad189f824862536bceeec750900" + dependencies: + hoek "0.9.x" + +sntp@1.x.x: + version "1.0.9" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" + dependencies: + hoek "2.x.x" + +snyk-config@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/snyk-config/-/snyk-config-1.0.1.tgz#f27aec2498b24027ac719214026521591111508f" + dependencies: + debug "^2.2.0" + nconf "^0.7.2" + path-is-absolute "^1.0.0" + +snyk-module@1.7.0, snyk-module@^1.6.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/snyk-module/-/snyk-module-1.7.0.tgz#07c6ca8556d281de6f9e2368c04ecb6dd1f2631a" + dependencies: + debug "^2.2.0" + hosted-git-info "^2.1.4" + validate-npm-package-name "^2.2.2" + +snyk-policy@1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/snyk-policy/-/snyk-policy-1.7.0.tgz#2151c751ab1edc040fc6b94a872aa989db492324" + dependencies: + debug "^2.2.0" + es6-promise "^3.1.2" + js-yaml "^3.5.3" + lodash.clonedeep "^4.3.1" + semver "^5.1.0" + snyk-module "^1.6.0" + snyk-resolve "^1.0.0" + snyk-try-require "^1.1.1" + then-fs "^2.0.0" + +snyk-recursive-readdir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/snyk-recursive-readdir/-/snyk-recursive-readdir-2.0.0.tgz#5cb59e94698169e0205a60e7d6a506d0b4d52ff3" + dependencies: + minimatch "3.0.2" + +snyk-resolve-deps@1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/snyk-resolve-deps/-/snyk-resolve-deps-1.7.0.tgz#13743a058437dff890baaf437c333c966a743cb6" + dependencies: + abbrev "^1.0.7" + ansicolors "^0.3.2" + clite "^0.3.0" + debug "^2.2.0" + es6-promise "^3.0.2" + lodash "^4.0.0" + lru-cache "^4.0.0" + minimist "^1.2.0" + semver "^5.1.0" + snyk-module "^1.6.0" + snyk-resolve "^1.0.0" + snyk-tree "^1.0.0" + snyk-try-require "^1.1.1" + then-fs "^2.0.0" + +snyk-resolve@1.0.0, snyk-resolve@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/snyk-resolve/-/snyk-resolve-1.0.0.tgz#bbe9196d37f57c39251e6be75ccdd5b2097e99a2" + dependencies: + debug "^2.2.0" + then-fs "^2.0.0" + +snyk-tree@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/snyk-tree/-/snyk-tree-1.0.0.tgz#0fb73176dbf32e782f19100294160448f9111cc8" + dependencies: + archy "^1.0.0" + +snyk-try-require@^1.1.1, snyk-try-require@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/snyk-try-require/-/snyk-try-require-1.2.0.tgz#30fc2b11c07064591ee35780c826be91312f2144" + dependencies: + debug "^2.2.0" + es6-promise "^3.1.2" + lodash.clonedeep "^4.3.0" + lru-cache "^4.0.0" + then-fs "^2.0.0" + +snyk@^1.9.1: + version "1.25.0" + resolved "https://registry.yarnpkg.com/snyk/-/snyk-1.25.0.tgz#47edab6b41cd78b27f666b2650d808c65d65f342" + dependencies: + abbrev "^1.0.7" + ansi-escapes "^1.3.0" + chalk "^1.1.1" + configstore "^1.2.0" + debug "^2.2.0" + es6-promise "^3.0.2" + hasbin "^1.2.3" + inquirer 
"1.0.3" + open "^0.0.5" + os-name "^1.0.3" + request "^2.74.0" + semver "^5.1.0" + snyk-config "1.0.1" + snyk-module "1.7.0" + snyk-policy "1.7.0" + snyk-recursive-readdir "^2.0.0" + snyk-resolve "1.0.0" + snyk-resolve-deps "1.7.0" + snyk-tree "^1.0.0" + snyk-try-require "^1.2.0" + tempfile "^1.1.1" + then-fs "^2.0.0" + undefsafe "0.0.3" + update-notifier "^0.5.0" + url "^0.11.0" + uuid "^3.0.1" + +sort-keys@^1.1.1, sort-keys@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" + dependencies: + is-plain-obj "^1.0.0" + +source-map@0.4.x, source-map@^0.4.2: + version "0.4.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" + dependencies: + amdefine ">=0.0.4" + +source-map@^0.5.6, source-map@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" + +source-map@~0.1.7: + version "0.1.43" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.43.tgz#c24bc146ca517c1471f5dacbe2571b2b7f9e3346" + dependencies: + amdefine ">=0.0.4" + +spawn-args@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/spawn-args/-/spawn-args-0.2.0.tgz#fb7d0bd1d70fd4316bd9e3dec389e65f9d6361bb" + +spawn-sync@^1.0.15: + version "1.0.15" + resolved "https://registry.yarnpkg.com/spawn-sync/-/spawn-sync-1.0.15.tgz#b00799557eb7fb0c8376c29d44e8a1ea67e57476" + dependencies: + concat-stream "^1.4.7" + os-shim "^0.1.2" + +spdx-correct@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" + dependencies: + spdx-license-ids "^1.0.2" + +spdx-expression-parse@~1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" + +spdx-license-ids@^1.0.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" + +specificity@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/specificity/-/specificity-0.3.0.tgz#332472d4e5eb5af20821171933998a6bc3b1ce6f" + +split2@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/split2/-/split2-0.2.1.tgz#02ddac9adc03ec0bb78c1282ec079ca6e85ae900" + dependencies: + through2 "~0.6.1" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + +sshpk@^1.7.0: + version "1.10.2" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.10.2.tgz#d5a804ce22695515638e798dbe23273de070a5fa" + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + dashdash "^1.12.0" + getpass "^0.1.1" + optionalDependencies: + bcrypt-pbkdf "^1.0.0" + ecc-jsbn "~0.1.1" + jodid25519 "^1.0.0" + jsbn "~0.1.0" + tweetnacl "~0.14.0" + +stack-trace@0.0.x: + version "0.0.9" + resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.9.tgz#a8f6eaeca90674c333e7c43953f275b451510695" + +stream-combiner@^0.2.1: + version "0.2.2" + resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.2.2.tgz#aec8cbac177b56b6f4fa479ced8c1912cee52858" + dependencies: + duplexer "~0.1.1" + through "~2.3.4" + +stream-shift@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" + +stream-transform@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/stream-transform/-/stream-transform-0.1.2.tgz#7d8e6b4e03ac4781778f8c79517501bfb0762a9f" + +string-length@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-0.1.2.tgz#ab04bb33867ee74beed7fb89bb7f089d392780f2" + dependencies: + strip-ansi "^0.2.1" + +string-length@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-1.0.1.tgz#56970fb1c38558e9e70b728bf3de269ac45adfac" + dependencies: + strip-ansi "^3.0.0" + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.0.0.tgz#635c5436cc72a6e0c387ceca278d4e2eec52687e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^3.0.0" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + +stringify-object@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-1.0.1.tgz#86d35e7dbfbce9aa45637d7ecdd7847e159db8a2" + +stringstream@~0.0.4: + version "0.0.5" + resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" + +strip-ansi@^0.2.1: + version "0.2.2" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.2.2.tgz#854d290c981525fc8c397a910b025ae2d54ffc08" + dependencies: + ansi-regex "^0.1.0" + +strip-ansi@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.3.0.tgz#25f48ea22ca79187f3174a4db8759347bb126220" + dependencies: + ansi-regex "^0.2.1" + +strip-ansi@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-2.0.1.tgz#df62c1aa94ed2f114e1d0f21fd1d50482b79a60e" + dependencies: + ansi-regex "^1.0.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.1.1.tgz#39e8a98d044d150660abe4a6808acf70bb7bc991" + +strip-bom@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + dependencies: + is-utf8 "^0.2.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + +strip-indent@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" + dependencies: + get-stdin "^4.0.1" + +strip-json-comments@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-1.0.4.tgz#1e15fbcac97d3ee99bf2d73b4c656b082bbafb91" + 
+strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + +style-search@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/style-search/-/style-search-0.1.0.tgz#7958c793e47e32e07d2b5cafe5c0bf8e12e77902" + +stylehacks@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-2.3.2.tgz#64c83e0438a68c9edf449e8c552a7d9ab6009b0b" + dependencies: + browserslist "^1.1.3" + chalk "^1.1.1" + log-symbols "^1.0.2" + minimist "^1.2.0" + plur "^2.1.2" + postcss "^5.0.18" + postcss-reporter "^1.3.3" + postcss-selector-parser "^2.0.0" + read-file-stdin "^0.2.1" + text-table "^0.2.0" + write-file-stdout "0.0.2" + +stylelint-config-standard@^16.0.0: + version "16.0.0" + resolved "https://registry.yarnpkg.com/stylelint-config-standard/-/stylelint-config-standard-16.0.0.tgz#bb7387bff1d7dd7186a52b3ebf885b2405d691bf" + +stylelint@^7.9.0: + version "7.10.1" + resolved "https://registry.yarnpkg.com/stylelint/-/stylelint-7.10.1.tgz#209a7ce5e781fc2a62489fbb31ec0201ec675db2" + dependencies: + autoprefixer "^6.0.0" + balanced-match "^0.4.0" + chalk "^1.1.1" + colorguard "^1.2.0" + cosmiconfig "^2.1.1" + debug "^2.6.0" + doiuse "^2.4.1" + execall "^1.0.0" + file-entry-cache "^2.0.0" + get-stdin "^5.0.0" + globby "^6.0.0" + globjoin "^0.1.4" + html-tags "^1.1.1" + ignore "^3.2.0" + imurmurhash "^0.1.4" + known-css-properties "^0.0.7" + lodash "^4.17.4" + log-symbols "^1.0.2" + meow "^3.3.0" + micromatch "^2.3.11" + normalize-selector "^0.2.0" + postcss "^5.0.20" + postcss-less "^0.14.0" + postcss-media-query-parser "^0.2.0" + postcss-reporter "^3.0.0" + postcss-resolve-nested-selector "^0.1.1" + postcss-scss "^0.4.0" + postcss-selector-parser "^2.1.1" + postcss-value-parser "^3.1.1" + resolve-from "^2.0.0" + specificity "^0.3.0" + string-width "^2.0.0" + style-search "^0.1.0" + stylehacks "^2.3.2" + sugarss "^0.2.0" + svg-tags "^1.0.0" + table "^4.0.1" + +sugarss@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/sugarss/-/sugarss-0.2.0.tgz#ac34237563327c6ff897b64742bf6aec190ad39e" + dependencies: + postcss "^5.2.4" + +supports-color@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-0.2.0.tgz#d92de2694eb3f67323973d7ae3d8b55b4c22190a" + +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + +supports-color@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + dependencies: + has-flag "^1.0.0" + +svg-tags@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764" + +swagger-methods@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/swagger-methods/-/swagger-methods-1.0.0.tgz#b39c77957d305a6535c0a1e015081185b99d61fc" + +swagger-parser@^3.3.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/swagger-parser/-/swagger-parser-3.4.1.tgz#0290529dbae254d178b442a95df60d23d142301d" + dependencies: + call-me-maybe "^1.0.1" + debug "^2.2.0" + es6-promise "^3.0.2" + json-schema-ref-parser "^1.4.1" + ono "^2.0.1" + swagger-methods "^1.0.0" + swagger-schema-official "2.0.0-bab6bed" + z-schema "^3.16.1" + +swagger-schema-official@2.0.0-bab6bed: + version "2.0.0-bab6bed" + resolved 
"https://registry.yarnpkg.com/swagger-schema-official/-/swagger-schema-official-2.0.0-bab6bed.tgz#70070468d6d2977ca5237b2e519ca7d06a2ea3fd" + +synesthesia@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/synesthesia/-/synesthesia-1.0.1.tgz#5ef95ea548c0d5c6e6f9bb4b0d0731dff864a777" + dependencies: + css-color-names "0.0.3" + +table@^3.7.8: + version "3.8.3" + resolved "https://registry.yarnpkg.com/table/-/table-3.8.3.tgz#2bbc542f0fda9861a755d3947fefd8b3f513855f" + dependencies: + ajv "^4.7.0" + ajv-keywords "^1.0.0" + chalk "^1.1.1" + lodash "^4.0.0" + slice-ansi "0.0.4" + string-width "^2.0.0" + +table@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/table/-/table-4.0.1.tgz#a8116c133fac2c61f4a420ab6cdf5c4d61f0e435" + dependencies: + ajv "^4.7.0" + ajv-keywords "^1.0.0" + chalk "^1.1.1" + lodash "^4.0.0" + slice-ansi "0.0.4" + string-width "^2.0.0" + +tar-fs@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-0.5.2.tgz#0f59424be7eeee45232316e302f66d3f6ea6db3e" + dependencies: + mkdirp "^0.5.0" + pump "^0.3.5" + tar-stream "^0.4.6" + +tar-stream@^0.4.6: + version "0.4.7" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-0.4.7.tgz#1f1d2ce9ebc7b42765243ca0e8f1b7bfda0aadcd" + dependencies: + bl "^0.9.0" + end-of-stream "^1.0.0" + readable-stream "^1.0.27-1" + xtend "^4.0.0" + +tempfile@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/tempfile/-/tempfile-1.1.1.tgz#5bcc4eaecc4ab2c707d8bc11d99ccc9a2cb287f2" + dependencies: + os-tmpdir "^1.0.0" + uuid "^2.0.1" + +text-table@^0.2.0, text-table@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + +the-argv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/the-argv/-/the-argv-1.0.0.tgz#0084705005730dd84db755253c931ae398db9522" + +then-fs@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/then-fs/-/then-fs-2.0.0.tgz#72f792dd9d31705a91ae19ebfcf8b3f968c81da2" + dependencies: + promise ">=3.2 <8" + +throttleit@~0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-0.0.2.tgz#cfedf88e60c00dd9697b61fdd2a8343a9b680eaf" + +through2@^0.6.1, through2@^0.6.3, through2@~0.6.1: + version "0.6.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-0.6.5.tgz#41ab9c67b29d57209071410e1d7a7a968cd3ad48" + dependencies: + readable-stream ">=1.0.33-1 <1.1.0-0" + xtend ">=4.0.0 <4.1.0-0" + +"through@>=2.2.7 <3", through@^2.3.6, through@~2.3.4: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + +timed-out@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-2.0.0.tgz#f38b0ae81d3747d628001f41dafc652ace671c0a" + +timed-out@^3.0.0: + version "3.1.3" + resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-3.1.3.tgz#95860bfcc5c76c277f8f8326fd0f5b2e20eba217" + +timers-ext@0.1: + version "0.1.0" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.0.tgz#00345a2ca93089d1251322054389d263e27b77e2" + dependencies: + es5-ext "~0.10.2" + next-tick "~0.2.2" + +tmp@0.0.23: + version "0.0.23" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.23.tgz#de874aa5e974a85f0a32cdfdbd74663cb3bd9c74" + +tmp@^0.0.29: + version "0.0.29" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.29.tgz#f25125ff0dd9da3ccb0c2dd371ee1288bb9128c0" + dependencies: + os-tmpdir "~1.0.1" + +touch@0.0.2: + 
version "0.0.2" + resolved "https://registry.yarnpkg.com/touch/-/touch-0.0.2.tgz#a65a777795e5cbbe1299499bdc42281ffb21b5f4" + dependencies: + nopt "~1.0.10" + +tough-cookie@>=0.12.0, tough-cookie@~2.3.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" + dependencies: + punycode "^1.4.1" + +tough-cookie@^0.12.1: + version "0.12.1" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-0.12.1.tgz#8220c7e21abd5b13d96804254bd5a81ebf2c7d62" + dependencies: + punycode ">=0.2.0" + +"traverse@>=0.3.0 <0.4": + version "0.3.9" + resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9" + +traverse@^0.6.6: + version "0.6.6" + resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.6.6.tgz#cbdf560fd7b9af632502fed40f918c157ea97137" + +trim-newlines@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" + +tryit@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tryit/-/tryit-1.0.3.tgz#393be730a9446fd1ead6da59a014308f36c289cb" + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + dependencies: + safe-buffer "^5.0.1" + +tunnel-agent@~0.4.0: + version "0.4.3" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.4.3.tgz#6373db76909fe570e08d73583365ed828a74eeeb" + +tv4@^1.2.7: + version "1.3.0" + resolved "https://registry.yarnpkg.com/tv4/-/tv4-1.3.0.tgz#d020c846fadd50c855abb25ebaecc68fc10f7963" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + dependencies: + prelude-ls "~1.1.2" + +type-detect@0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-0.1.1.tgz#0ba5ec2a885640e470ea4e8505971900dac58822" + +type-detect@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-1.0.0.tgz#762217cc06db258ec48908a1298e8b95121e8ea2" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + +uc.micro@^1.0.1, uc.micro@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.3.tgz#7ed50d5e0f9a9fb0a573379259f2a77458d50192" + +uglify-js@~2.3: + version "2.3.6" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.3.6.tgz#fa0984770b428b7a9b2a8058f46355d14fef211a" + dependencies: + async "~0.2.6" + optimist "~0.3.5" + source-map "~0.1.7" + +uglify-js@~2.6.0: + version "2.6.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.6.4.tgz#65ea2fb3059c9394692f15fed87c2b36c16b9adf" + dependencies: + async "~0.2.6" + source-map "~0.5.1" + uglify-to-browserify "~1.0.0" + yargs "~3.10.0" + +uglify-to-browserify@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" + +uid-number@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.5.tgz#5a3db23ef5dbd55b81fce0ec9a2ac6fccdebb81e" + 
+undefsafe@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-0.0.3.tgz#ecca3a03e56b9af17385baac812ac83b994a962f" + +underscore.string@~2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.2.1.tgz#d7c0fa2af5d5a1a67f4253daee98132e733f0f19" + +underscore.string@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.3.3.tgz#71c08bf6b428b1133f37e78fa3a21c82f7329b0d" + +underscore.string@~2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.4.0.tgz#8cdd8fbac4e2d2ea1e7e2e8097c42f442280f85b" + +underscore.string@~3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-3.2.3.tgz#806992633665d5e5fcb4db1fb3a862eb68e9e6da" + +underscore@~1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8" + +underscore@~1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.7.0.tgz#6bbaf0877500d36be34ecaa584e0db9fef035209" + +uniq@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" + +unzip-response@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-1.0.2.tgz#b984f0877fc0a89c2c773cc1ef7b5b232b5b06fe" + +update-notifier@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.2.0.tgz#a010c928adcf02090b8e0ce7fef6fb0a7cacc34a" + dependencies: + chalk "^0.5.0" + configstore "^0.3.0" + latest-version "^0.2.0" + semver-diff "^0.1.0" + string-length "^0.1.2" + +update-notifier@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.5.0.tgz#07b5dc2066b3627ab3b4f530130f7eddda07a4cc" + dependencies: + chalk "^1.0.0" + configstore "^1.0.0" + is-npm "^1.0.0" + latest-version "^1.0.0" + repeating "^1.1.2" + semver-diff "^2.0.0" + string-length "^1.0.0" + +update-notifier@^0.6.0: + version "0.6.3" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.6.3.tgz#776dec8daa13e962a341e8a1d98354306b67ae08" + dependencies: + boxen "^0.3.1" + chalk "^1.0.0" + configstore "^2.0.0" + is-npm "^1.0.0" + latest-version "^2.0.0" + semver-diff "^2.0.0" + +update-notifier@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-1.0.3.tgz#8f92c515482bd6831b7c93013e70f87552c7cf5a" + dependencies: + boxen "^0.6.0" + chalk "^1.0.0" + configstore "^2.0.0" + is-npm "^1.0.0" + latest-version "^2.0.0" + lazy-req "^1.1.0" + semver-diff "^2.0.0" + xdg-basedir "^2.0.0" + +uptown@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/uptown/-/uptown-0.4.1.tgz#2aa732b75c05c34432373498e2b419fbd9cd5452" + dependencies: + lodash "^4.14.2" + +uri-path@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/uri-path/-/uri-path-1.0.0.tgz#9747f018358933c31de0fccfd82d138e67262e32" + +uri-template@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/uri-template/-/uri-template-1.0.1.tgz#14a925a37e4d93f7625432aa116b05e50cae81ad" + dependencies: + pct-encode "~1.0.0" + +url-parse-lax@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" + dependencies: + prepend-http "^1.0.1" + +url@^0.11.0: + 
version "0.11.0" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" + dependencies: + punycode "1.3.2" + querystring "0.2.0" + +user-home@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" + +user-home@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/user-home/-/user-home-2.0.0.tgz#9c70bfd8169bc1dcbf48604e0f04b8b49cde9e9f" + dependencies: + os-homedir "^1.0.0" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + +uuid@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" + +uuid@^3.0.0, uuid@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" + +validate-npm-package-license@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" + dependencies: + spdx-correct "~1.0.0" + spdx-expression-parse "~1.0.0" + +validate-npm-package-name@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-2.2.2.tgz#f65695b22f7324442019a3c7fa39a6e7fd299085" + dependencies: + builtins "0.0.7" + +validator@^6.0.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-6.3.0.tgz#47ce23ed8d4eaddfa9d4b8ef0071b6cf1078d7c8" + +verror@1.3.6: + version "1.3.6" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.3.6.tgz#cff5df12946d297d2baaefaa2689e25be01c005c" + dependencies: + extsprintf "1.0.2" + +which-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" + +which@^1.2.14, which@^1.2.9: + version "1.2.14" + resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5" + dependencies: + isexe "^2.0.0" + +which@~1.0.5: + version "1.0.9" + resolved "https://registry.yarnpkg.com/which/-/which-1.0.9.tgz#460c1da0f810103d0321a9b633af9e575e64486f" + +widest-line@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-1.0.0.tgz#0c09c85c2a94683d0d7eaf8ee097d564bf0e105c" + dependencies: + string-width "^1.0.1" + +win-release@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/win-release/-/win-release-1.1.1.tgz#5fa55e02be7ca934edfc12665632e849b72e5209" + dependencies: + semver "^5.0.1" + +window-size@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" + +window-size@^0.1.1: + version "0.1.4" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.4.tgz#f8e1aa1ee5a53ec5bf151ffa09742a6ad7697876" + +window-size@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.2.0.tgz#b4315bb4214a3d7058ebeee892e13fa24d98b075" + +winston@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/winston/-/winston-2.3.1.tgz#0b48420d978c01804cf0230b648861598225a119" + dependencies: + async "~1.0.0" + colors "1.0.x" + cycle "1.0.x" + eyes "0.1.x" + isstream "0.1.x" + stack-trace "0.0.x" + +wordwrap@0.0.2: + version "0.0.2" + resolved 
"https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" + +wordwrap@~0.0.2: + version "0.0.3" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + +wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + +wrench@~1.4.3: + version "1.4.4" + resolved "https://registry.yarnpkg.com/wrench/-/wrench-1.4.4.tgz#7f523efdb71b0100e77dce834c06523cbe3d54e0" + +write-file-atomic@^1.1.2: + version "1.3.1" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-1.3.1.tgz#7d45ba32316328dd1ec7d90f60ebc0d845bb759a" + dependencies: + graceful-fs "^4.1.11" + imurmurhash "^0.1.4" + slide "^1.1.5" + +write-file-stdout@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/write-file-stdout/-/write-file-stdout-0.0.2.tgz#c252d7c7c5b1b402897630e3453c7bfe690d9ca1" + +write-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/write-json-file/-/write-json-file-2.0.0.tgz#0eaec981fcf9288dbc2806cbd26e06ab9bdca4ed" + dependencies: + graceful-fs "^4.1.2" + mkdirp "^0.5.1" + pify "^2.0.0" + sort-keys "^1.1.1" + write-file-atomic "^1.1.2" + +write-pkg@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/write-pkg/-/write-pkg-2.1.0.tgz#353aa44c39c48c21440f5c08ce6abd46141c9c08" + dependencies: + sort-keys "^1.1.2" + write-json-file "^2.0.0" + +write@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" + dependencies: + mkdirp "^0.5.1" + +xdg-basedir@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-1.0.1.tgz#14ff8f63a4fdbcb05d5b6eea22b36f3033b9f04e" + dependencies: + user-home "^1.0.0" + +xdg-basedir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-2.0.0.tgz#edbc903cc385fc04523d966a335504b5504d1bd2" + dependencies: + os-homedir "^1.0.0" + +xo-init@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/xo-init/-/xo-init-0.4.0.tgz#e92562e38117eb71e55b8d34ee2d006252a49d6a" + dependencies: + arrify "^1.0.0" + execa "^0.5.0" + minimist "^1.1.3" + path-exists "^3.0.0" + read-pkg-up "^2.0.0" + the-argv "^1.0.0" + write-pkg "^2.0.0" + +xo@^0.17.1: + version "0.17.1" + resolved "https://registry.yarnpkg.com/xo/-/xo-0.17.1.tgz#de65bc8120474fa76104f8a80b3b792d88c50ef6" + dependencies: + arrify "^1.0.0" + debug "^2.2.0" + deep-assign "^1.0.0" + eslint "^3.6.0" + eslint-config-xo "^0.17.0" + eslint-formatter-pretty "^1.0.0" + eslint-plugin-ava "^3.1.0" + eslint-plugin-import "^2.0.0" + eslint-plugin-no-use-extend-native "^0.3.2" + eslint-plugin-promise "^3.0.0" + eslint-plugin-unicorn "^1.0.0" + get-stdin "^5.0.0" + globby "^6.0.0" + has-flag "^2.0.0" + meow "^3.4.2" + multimatch "^2.1.0" + parse-gitignore "^0.3.1" + path-exists "^3.0.0" + pkg-conf "^2.0.0" + resolve-cwd "^1.0.0" + resolve-from "^2.0.0" + update-notifier "^1.0.0" + xo-init "^0.4.0" + +"xtend@>=4.0.0 <4.1.0-0", xtend@^4.0.0: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" + +y18n@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + +yallist@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.0.0.tgz#306c543835f09ee1a4cb23b7bce9ab341c91cdd4" + +yaml-js@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/yaml-js/-/yaml-js-0.1.4.tgz#ac965488a94daad5b65a92cab28c262407730fc0" + +yargs-parser@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-2.4.1.tgz#85568de3cf150ff49fa51825f03a8c880ddcc5c4" + dependencies: + camelcase "^3.0.0" + lodash.assign "^4.0.6" + +yargs@^1.2.6: + version "1.3.3" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-1.3.3.tgz#054de8b61f22eefdb7207059eaef9d6b83fb931a" + +yargs@^3.5.4, yargs@~3.15.0: + version "3.15.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.15.0.tgz#3d9446ef21fb3791b3985690662e4b9683c7f181" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "^0.1.1" + +yargs@^4.3.2: + version "4.8.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-4.8.1.tgz#c0c42924ca4aaa6b0e6da1739dfb216439f9ddc0" + dependencies: + cliui "^3.2.0" + decamelize "^1.1.1" + get-caller-file "^1.0.1" + lodash.assign "^4.0.3" + os-locale "^1.4.0" + read-pkg-up "^1.0.1" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^1.0.1" + which-module "^1.0.0" + window-size "^0.2.0" + y18n "^3.2.1" + yargs-parser "^2.4.1" + +yargs@~3.10.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "0.1.0" + +z-schema@^3.16.1: + version "3.18.2" + resolved "https://registry.yarnpkg.com/z-schema/-/z-schema-3.18.2.tgz#e422196b5efe60b46adef3c3f2aef2deaa911161" + dependencies: + lodash.get "^4.1.2" + lodash.isequal "^4.4.0" + validator "^6.0.0" + optionalDependencies: + commander "^2.7.1" diff --git a/.gitattributes b/.gitattributes index 661add7183..117bf51f1b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -3,6 +3,7 @@ # Handle line endings automatically for files detected as text # and leave all files detected as binary untouched. * text=auto +* eol=lf # # The above will handle all files NOT found below diff --git a/.gitignore b/.gitignore index bd9757ebbd..12a9e8d9f9 100644 --- a/.gitignore +++ b/.gitignore @@ -67,8 +67,8 @@ lib/unrar2/UnRAR.exe # Grunt # ###################### -.build/bower_components -.build/node_modules +**/bower_components +**/node_modules .build/dist .build/package.json diff --git a/.travis.yml b/.travis.yml index 26fe432071..4c151629fb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,31 +1,32 @@ language: python python: -- 2.7.9 + - 2.7.9 sudo: false branches: except: - - master -env: -- TRAVIS_NODE_VERSION="5.0.0" -before_install: -#- rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm -# && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm -# install $TRAVIS_NODE_VERSION -#- npm install -g grunt-cli -#- npm install -g bower -#- cd .build && cp ../package.json . && npm install --quiet && bower install && cd .. 
+ - master install: -- pip install --upgrade pip -- pip install --upgrade tox + - pip install --upgrade pip + - pip install --upgrade tox + - pip install dredd_hooks + - pip install PyYAML + - pip install six + - nvm install 6.9.1 + - nvm use 6.9.1 + - 'curl -o- -L https://yarnpkg.com/install.sh | bash' + - 'export PATH="$HOME/.yarn/bin:$PATH" && yarn install' script: -- tox -v --recreate + - yarn test-js + - tox -v --recreate + - yarn test-api cache: + yarn: true directories: - - $HOME/.cache/pip - - .build/bower_components - - .build/node_modules + - $HOME/.cache/pip + - node_modules after_failure: -- cat ./Logs/application.log + - cat ./dredd/data/Logs/application.log notifications: slack: - secure: YYOoxBgy4+iAIWylJX0ndT+KwctRzSL/8mUOPErIJOaGAwU6w9JT2WyO1uP/xq9xm+MjgGWqKJ7bpwGRJ12bCkP4mPcnn5A7c/UqFcCgwVgWfKdI/5EeHB6RfzK76J01amenN69/hzt5WjifE4wCONfJAcftKxylh69kWV5QipDcZZA//yQiO9BfYlsW3CxkHb3OGPHSJVYb32IdfCn4hnt3WaWkentXBj5R0v0kyNKbkFqQ5OGPlsjrYpxDMF8vgduxmg8zGw2tXjWGIC2bWuPoAurZy0ewyoKIna82wgkNySFjNBXoZCtssW7yPFJjUQHP/lHIZa4gLmA5Gdli7WoaN/lYaoGXlxAXSskfvgGXin92CRHukSbzEdzQznPhzxzIcuJA2je0gAvmIn2nw0itUGKbVQBZTV3nRFJb3iEHaodC0+1zozGQASxfXQzzBuHU6ZUAFWzlMNQ80RjuiS5951mmgIBo8fOfTWkVFTX8ayEfGbYhqoJqJ5QMjTjoEt8SYKrlHdlDBh803LmKOsID9B8dDn0onXlYNZAioqTTFb/xqL95aCDr84PKYbNSfraqPU6hsSc8ITtxeMS454k8BGxzed0s8bKsCDQP7HXmYKbShByMYX8NipuhtEDXeCGyCLX3atoO0qFiZ0/sUXXf67w/14eLRBAdKfnr02I= + secure: >- + YYOoxBgy4+iAIWylJX0ndT+KwctRzSL/8mUOPErIJOaGAwU6w9JT2WyO1uP/xq9xm+MjgGWqKJ7bpwGRJ12bCkP4mPcnn5A7c/UqFcCgwVgWfKdI/5EeHB6RfzK76J01amenN69/hzt5WjifE4wCONfJAcftKxylh69kWV5QipDcZZA//yQiO9BfYlsW3CxkHb3OGPHSJVYb32IdfCn4hnt3WaWkentXBj5R0v0kyNKbkFqQ5OGPlsjrYpxDMF8vgduxmg8zGw2tXjWGIC2bWuPoAurZy0ewyoKIna82wgkNySFjNBXoZCtssW7yPFJjUQHP/lHIZa4gLmA5Gdli7WoaN/lYaoGXlxAXSskfvgGXin92CRHukSbzEdzQznPhzxzIcuJA2je0gAvmIn2nw0itUGKbVQBZTV3nRFJb3iEHaodC0+1zozGQASxfXQzzBuHU6ZUAFWzlMNQ80RjuiS5951mmgIBo8fOfTWkVFTX8ayEfGbYhqoJqJ5QMjTjoEt8SYKrlHdlDBh803LmKOsID9B8dDn0onXlYNZAioqTTFb/xqL95aCDr84PKYbNSfraqPU6hsSc8ITtxeMS454k8BGxzed0s8bKsCDQP7HXmYKbShByMYX8NipuhtEDXeCGyCLX3atoO0qFiZ0/sUXXf67w/14eLRBAdKfnr02I= diff --git a/api-description.yml b/api-description.yml deleted file mode 100644 index 4f70853bf2..0000000000 --- a/api-description.yml +++ /dev/null @@ -1,342 +0,0 @@ -swagger: '2.0' -info: - title: Medusa API - description: DESCRIPTION - version: "1.0.0" -host: localhost:8081 -schemes: - - http - - https -securityDefinitions: - x-api-key: - type: apiKey - name: x-api-key - in: query - Bearer: - type: apiKey - name: Authorization - in: header -basePath: /api/v2 -produces: - - application/json -security: [ { x-api-key: [] }, { Bearer: [] } ] -paths: - /authenticate: - post: - security: [] - summary: "Returns a JWT for the provided user. This is required for all other routes." - description: | - DESCRIPTION. - responses: - 200: - description: "The JWT for the user." - schema: - type: string - 401: - description: "No credentials provided or invalid credentials." - schema: - type: object - parameters: - - name: auth - in: body - description: auth object - required: true - schema: - $ref: '#/definitions/Auth' - /show: - get: - summary: "Returns all shows in Medusa that the user has access to." - description: | - The Show endpoint returns information about the Shows added to Medusa. 
- responses: - 200: - description: "An array of shows" - schema: - type: array - items: - $ref: '#/definitions/Show' - default: - description: "Unexpected error" - schema: - $ref: '#/definitions/Error' - /config: - get: - summary: "Returns all config values for Medusa." - description: | - DESCRIPTION - responses: - 200: - description: "An object containing all the config values." - schema: - type: object - properties: - wikiUrl: - type: string - themeName: - type: string - namingForceFolders: - type: boolean - databaseVersion: - type: object - properties: - major: - type: integer - minor: - type: integer - layout: - type: object - properties: - show: - type: object - properties: - specials: - type: boolean - allSeasons: - type: boolean - home: - type: string - history: - type: string - schedule: - type: string - trimZero: - type: boolean - configFile: - type: string - animeSplitHome: - type: boolean - sortArticle: - type: boolean - sourceUrl: - type: string - fanartBackgroundOpacity: - type: integer - emby: - type: object - properties: - enabled: - type: boolean - logDir: - type: string - posterSortby: - type: string - subtitles: - type: object - properties: - enabled: - type: boolean - fuzzyDating: - type: boolean - timePreset: - type: string - kodi: - type: object - properties: - enabled: - type: boolean - dbFilename: - type: string - pythonVersion: - type: string - downloadUrl: - type: string - nzb: - type: object - properties: - username: - type: string - host: - type: string - password: - type: string - enabled: - type: boolean - priority: - type: integer - release: - type: string - posterSortDir: - type: string - locale: - type: string - webRoot: - type: string - torrents: - type: object - properties: - username: - type: string - seedTime: - type: integer - rpcurl: - type: string - authType: - type: string - paused: - type: boolean - host: - type: string - path: - type: string - password: - type: string - verifySSL: - type: boolean - highBandwidth: - type: boolean - enabled: - type: boolean - label: - type: string - labelAnime: - type: string - method: - type: string - enum: ["blackhole", "utorrent", "transmission", "deluge", "deluged", "download_station", "rtorrent", "qbittorrent", "mlnet"] -definitions: - Show: - type: object - properties: - id: - type: object - description: | - All of the ids mapped to a certain show, atleast one of these is required. - properties: - tvdb: - type: string - description: "This is the ID from thetvdb.com" - tvmaze: - type: string - description: "This is the ID from tvmaze.com" - imdb: - type: string - description: "This is the ID from imdb.com" - title: - type: string - description: | - This is the title of the show, this should be in the main - language the show was originally created in. If other - titles exist for other languages they will be part of the "akas" field. 
- indexer: - type: string - network: - type: string - type: - type: string - status: - type: string - airs: - type: string - language: - type: string - showType: - type: string - akas: - type: object - year: - type: object - airDates: - type: object - items: - type: string - runtime: - type: integer - genres: - type: array - items: - type: string - rating: - type: object - properties: - imdb: - type: object - properties: - stars: - type: string - votes: - type: integer - classification: - type: string - cache: - type: object - countries: - type: array - items: - type: string - config: - type: object - properties: - location: - type: string - qualities: - type: object - properties: - allowed: - type: array - items: - type: string - prefered: - type: array - items: - type: string - paused: - type: boolean - airByDate: - type: boolean - subtitlesEnabled: - type: boolean - dvdOrder: - type: boolean - flattenFolders: - type: boolean - scene: - type: boolean - defaultEpisodeStatus: - type: string - aliases: - type: array - items: - type: string - release: - type: object - properties: - blacklist: - type: array - items: - type: string - whitelist: - type: array - items: - type: string - ignoredWords: - type: array - items: - type: string - requiredWords: - type: array - items: - type: string - Auth: - description: Auth object - properties: - username: - type: string - password: - type: string - exp: - type: integer - minimum: 3600 - maximum: 31536000 - required: - - username - - password - Error: - type: object - properties: - code: - type: integer - format: int32 - message: - type: string - fields: - type: string diff --git a/contrib/nzbToMedia b/contrib/nzbToMedia deleted file mode 160000 index c50957d0b4..0000000000 --- a/contrib/nzbToMedia +++ /dev/null @@ -1 +0,0 @@ -Subproject commit c50957d0b4683a125df039329d8f90df630d4e28 diff --git a/contrib/readme.md b/contrib/readme.md deleted file mode 100644 index f4340d621f..0000000000 --- a/contrib/readme.md +++ /dev/null @@ -1,10 +0,0 @@ -SickRage Contrib -===== -Stuff contributed to SickRage, or included of users leisure - -## What will you find in here? - - Scripts - - Links to other repositories we think make your life better - - Custom themes - - Other things that may come in handy - diff --git a/dredd/.gitignore b/dredd/.gitignore new file mode 100644 index 0000000000..1269488f7f --- /dev/null +++ b/dredd/.gitignore @@ -0,0 +1 @@ +data diff --git a/dredd/api-description.yml b/dredd/api-description.yml new file mode 100644 index 0000000000..c51d12359e --- /dev/null +++ b/dredd/api-description.yml @@ -0,0 +1,1801 @@ +swagger: '2.0' +info: + title: Medusa API + description: DESCRIPTION + version: "1.0.0" +host: localhost:8081 +schemes: + - http + - https +securityDefinitions: + x-api-key: + type: apiKey + name: x-api-key + in: query + Bearer: + type: apiKey + name: Authorization + in: header +basePath: /api/v2 +consumes: + - "application/json" +produces: + - "application/json; charset=UTF-8" + - "text/plain; charset=UTF-8" + - "image/jpeg" +security: [ { x-api-key: [] }, { Bearer: [] } ] +paths: + /series: + post: + summary: Add series + description: | + Given an indexer and its id, adds the series to Medusa. 
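As a quick illustration of the series endpoint described above (an editor's sketch, not part of this diff): a client can add and then fetch a series using the host, basePath and x-api-key query parameter this spec declares. The key value below reuses the placeholder from the dredd stash later in this diff; substitute a real key.

```python
# Sketch only: add a series by tvdb id, then read it back.
import requests

BASE = 'http://localhost:8081/api/v2'            # host + basePath from the spec
API_KEY = '1234567890ABCDEF1234567890ABCDEF'     # placeholder key (same value the dredd hook stashes)

# POST /series: only the id (with indexer information) needs to be supplied
resp = requests.post(
    BASE + '/series',
    params={'x-api-key': API_KEY},               # the spec declares x-api-key as a query parameter
    json={'id': {'tvdb': 301824}},               # example body from the spec
)
resp.raise_for_status()                          # expect 201; Location points at the new series
print(resp.headers.get('Location'))

# GET /series/{id}: ids are "<indexer><id>" strings such as tvdb301824
series = requests.get(BASE + '/series/tvdb301824', params={'x-api-key': API_KEY}).json()
print(series.get('title'), series.get('status'))
```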
+ parameters: + - name: series + in: body + required: true + description: Only id (with indexer information) should be specified + schema: + $ref: '#/definitions/Series' + example: + id: + tvdb: 301824 + responses: + 201: + description: Series added + headers: + Location: + type: string + description: The location of the newly added series + schema: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: {} + 404: + $ref: '#/responses/error' + description: Series not found in the indexer + x-request: + body: + id: + tvdb: 99999999 + 409: + $ref: '#/responses/error' + description: Series already added + get: + summary: Return series that the user has access to + description: | + The Series endpoint returns information about the Series added to Medusa. + parameters: + - name: paused + in: query + required: false + description: Filter series based on paused status + type: boolean + - $ref: '#/parameters/detailed' + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of series + schema: + type: array + items: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid pagination parameters + x-request: + query-params: + sort: abc + /series/{id}: + get: + summary: Return information about a specific series + description: Retrieves information about a specific series + parameters: + - $ref: '#/parameters/series-id' + name: id + responses: + 200: + description: The series information + schema: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + path-params: + id: 123456 + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvdb999999999 + patch: + summary: Partial update series + description: Partial update series + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: series + in: body + required: false + description: Currently, only pause field is supported + schema: + $ref: '#/definitions/Series' + responses: + 200: + description: Response with only the updated fields + schema: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + body: + id: + tvdb: 80379 + path-params: + id: tvdb301824 + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + body: + id: + tvdb: 999999999 + path-params: + id: tvdb999999999 + delete: + summary: Delete a series + description: Delete a series + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: remove-files + in: query + required: false + description: Whether files from the series should be removed + type: boolean + responses: + 204: + description: Series is deleted successfully + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + path-params: + id: 123456 + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvdb999999999 + 409: + $ref: '#/responses/error' + description: Unable to delete series + /series/{id}/{field}: + get: + summary: Return a specific field from a given series + description: Retrieves a specific field from a given series + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: field + in: path + required: true + description: Any series field + x-example: network + type: string + responses: + 200: + description: The series information + schema: {} + 
400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvdb999999999 + /series/{seriesid}/episode: + get: + summary: Return episodes from a given series + description: | + The Episode endpoint returns information about the Episodes from a given Series. + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - name: season + in: query + required: false + description: The episode season + type: integer + format: int32 + - $ref: '#/parameters/detailed' + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of episodes + schema: + type: array + items: + $ref: '#/definitions/Episode' + 400: + $ref: '#/responses/error' + description: Invalid series id or pagination parameters + x-request: + query-params: + sort: abc + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + seriesid: tvdb999999999 + /series/{seriesid}/episode/{id}: + get: + summary: Return a specific episode from a given series + description: Retrieve a specific episode from a given series + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id' + name: id + - $ref: '#/parameters/detailed' + responses: + 200: + description: The episode information + schema: + $ref: '#/definitions/Episode' + 400: + $ref: '#/responses/error' + description: Invalid series or episode id + x-request: + path-params: + id: abc + 404: + $ref: '#/responses/error' + description: Series or episode not found + x-request: + path-params: + id: e999 + /series/{seriesid}/episode/{id}/{field}: + get: + summary: Return a specific field from a given episode + description: Retrieve a specific field from a given episode + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id' + name: id + - name: field + in: path + required: true + description: The episode field + x-example: airDate + type: string + responses: + 200: + description: The episode field value + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid series or episode id or invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Series or episode not found + x-request: + path-params: + id: "2050-12-31" + /series/{seriesid}/episode/{id}/metadata: + get: + summary: Return the video metadata from a specific episode + description: Retrieve the video metadata from a specific episode + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id' + name: id + responses: + 200: + description: The video metadata + schema: + $ref: '#/definitions/Metadata' + 400: + $ref: '#/responses/error' + description: Invalid series or episode id + x-request: + path-params: + id: s01 + 404: + $ref: '#/responses/error' + description: Series or episode not found + x-request: + path-params: + id: s99e99 + /series/{seriesid}/asset/{id}: + get: + summary: Return a specific asset from a given series + description: Retrieves a specific asset from a given series + produces: + - "image/jpeg" + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/asset-id' + name: id + responses: + 200: + description: The asset stream + x-expect: + no-body: true + 400: + $ref: '#/responses/error' + description: Invalid 
series id + x-request: + path-params: + seriesid: abc + 404: + $ref: '#/responses/error' + description: Series or asset not found + x-request: + path-params: + id: abc + /series/{id}/operation: + post: + summary: Create an operation that relates to a specific series + description: > + Create an operation that relates to a specific series. + Currently only type='ARCHIVE_EPISODES' is supported + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: operation + in: body + required: true + schema: + $ref: '#/definitions/Operation' + example: + type: ARCHIVE_EPISODES + responses: + 201: + description: "When type='ARCHIVE_EPISODES': episodes were archived" + x-disabled: true + 204: + description: "When type='ARCHIVE_EPISODES': no episode was archived" + 400: + $ref: '#/responses/error' + description: Invalid id or invalid operation type + x-request: + body: + type: SUPER_OPERATION + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvmaze999999999 + /alias: + get: + summary: Return existing aliases + description: Retrieve existing aliases + parameters: + - name: series + in: query + required: false + description: "Series identifier. E.g.: tvdb1234" + type: string + - name: season + in: query + required: false + description: The season number + type: integer + format: int32 + - name: type + in: query + required: false + description: Alias type + type: string + enum: [local] + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of aliases + schema: + type: array + items: + $ref: '#/definitions/Alias' + 400: + $ref: '#/responses/error' + description: Invalid series or pagination parameters + x-request: + query-params: + limit: 0 + post: + summary: Create a new alias + description: Create a new alias + parameters: + - name: alias + in: body + required: true + schema: + $ref: '#/definitions/Alias' + responses: + 201: + description: Alias created + headers: + Location: + type: string + description: The location of the newly created alias + schema: + $ref: '#/definitions/Alias' + x-stash: + alias-id: "${body['id']}" + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: + type: local + 409: + $ref: '#/responses/error' + description: Unable to create alias + x-disabled: true + /alias/{id}/{field}: + get: + summary: Return a specific field from a given alias + description: Retrieve a specific field from a a given alias + parameters: + - $ref: '#/parameters/alias-id' + name: id + - name: field + in: path + required: true + description: Any alias field + x-example: name + type: string + responses: + 200: + description: The alias information + schema: {} + x-request: + path-params: + id: "${stash['alias-id']}" + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + id: "${stash['alias-id']}" + field: abc + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + path-params: + id: 999999999 + /alias/{id}: + get: + summary: Return information about a given alias + description: Retrieves information about a given alias + parameters: + - $ref: '#/parameters/alias-id' + name: id + responses: + 200: + description: The alias information + schema: + $ref: '#/definitions/Alias' + x-request: + path-params: + id: "${stash['alias-id']}" + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + path-params: + id: 999999999 + 
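The list endpoints in this spec (/series, /series/{seriesid}/episode, /alias) share the page, limit and sort query parameters and the X-Pagination-* response headers defined further down. A rough client-side pager, assuming those headers are returned as declared (illustrative sketch only):

```python
# Walk GET /alias one page at a time using the declared pagination headers.
import requests

BASE = 'http://localhost:8081/api/v2'
API_KEY = '1234567890ABCDEF1234567890ABCDEF'     # placeholder key

def iter_aliases(limit=20):
    """Yield every alias, following X-Pagination-Count until exhausted."""
    page = 1
    while True:
        resp = requests.get(
            BASE + '/alias',
            params={'x-api-key': API_KEY, 'page': page, 'limit': limit, 'sort': 'name'},
        )
        resp.raise_for_status()
        items = resp.json()
        for alias in items:
            yield alias
        total = int(resp.headers.get('X-Pagination-Count', len(items)))
        if not items or page * limit >= total:
            break
        page += 1

for alias in iter_aliases():
    print(alias.get('id'), alias.get('series'), alias.get('name'))
```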
put: + summary: Replace alias data + description: Replace alias data + parameters: + - $ref: '#/parameters/alias-id' + name: id + x-example: 1 + - name: alias + in: body + required: true + schema: + $ref: '#/definitions/Alias' + example: + $ref: '#/definitions/Alias/example' + id: 1 + responses: + 204: + description: Alias data replaced + x-request: + path-params: + id: "${stash['alias-id']}" + body: + id: "${stash['alias-id']}" + series: tvdb301824 + name: TheBig + type: local + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: + id: 1 + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + body: + id: 999999999 + series: tvdb301824 + name: TheBig + type: local + path-params: + id: 999999999 + delete: + summary: Delete an alias + description: Delete an alias + parameters: + - $ref: '#/parameters/alias-id' + name: id + x-example: 123456 + responses: + 204: + description: Alias deleted + x-request: + path-params: + id: "${stash['alias-id']}" + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + path-params: + id: 999999999 + /alias-source: + get: + summary: Return existing sources for aliases + description: Retrieve existing sources for aliases + parameters: + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of alias-source + schema: + type: array + items: + $ref: '#/definitions/AliasSource' + 400: + $ref: '#/responses/error' + description: Invalid pagination parameters + x-request: + query-params: + page: abc + /alias-source/{id}: + get: + summary: Return a specific source for aliases + description: Retrieves a specific source for aliases + parameters: + - $ref: '#/parameters/alias-source-id' + name: id + responses: + 200: + description: The alias source information + schema: + $ref: '#/definitions/AliasSource' + 404: + $ref: '#/responses/error' + description: Alias source not found + x-request: + path-params: + id: abc + /alias-source/{id}/{field}: + get: + summary: Return a specific field from a given source for aliases + description: Retrieve a specific field from a given source for aliases + parameters: + - $ref: '#/parameters/alias-source-id' + name: id + - name: field + in: path + required: true + x-example: lastRefresh + type: string + responses: + 200: + description: The alias source field value + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Alias source not found + x-request: + path-params: + id: abc + /alias-source/{id}/operation: + post: + summary: Create an operation that relates to a given alias source + description: > + Create an operation that relates to a given alias source. 
+ Currently only type='REFRESH' is supported + parameters: + - $ref: '#/parameters/alias-source-with-all-id' + name: id + - name: operation + in: body + required: true + schema: + $ref: '#/definitions/Operation' + example: + type: REFRESH + responses: + 201: + description: "When type='REFRESH': aliases for that source were refreshed" + schema: + $ref: '#/definitions/Operation' + 400: + $ref: '#/responses/error' + description: Invalid id or invalid operation type + x-request: + body: + type: SUPER_OPERATION + 404: + $ref: '#/responses/error' + description: Alias source not found + x-request: + path-params: + id: abc + /config: + get: + summary: Return configurations for Medusa + description: Retrieve configurations + parameters: + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of config objects + schema: + type: array + items: + $ref: '#/definitions/Config' + 400: + $ref: '#/responses/error' + description: Invalid pagination parameters + x-request: + query-params: + page: 0 + /config/{id}: + get: + summary: Return information about a given configuration + description: Retrieve information about a given configuration + parameters: + - $ref: '#/parameters/config-id' + name: id + responses: + 200: + description: The configuration information + schema: + $ref: '#/definitions/Config' + 404: + $ref: '#/responses/error' + description: Configuration not found + x-request: + path-params: + id: super + /config/{id}/{field}: + get: + summary: Return a specific field from a given configuration + description: Retrieve a specific field from a given configuration + parameters: + - $ref: '#/parameters/config-id' + name: id + - name: field + in: path + required: true + description: Any configuration field + x-example: themeName + type: string + responses: + 200: + description: The configuration information + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Configuration not found + x-request: + path-params: + id: abc + /log: + get: + summary: Return log messages from the application. + description: Retrieve log messages. Default sorting is descending by timestamp + parameters: + - $ref: '#/parameters/log-level' + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination_stream' + description: A paged array of log messages + schema: + type: array + items: + $ref: '#/definitions/Log' + examples: + 'application/json': + - $ref: '#/definitions/Log/example' + - $ref: '#/definitions/Log/example' + - $ref: '#/definitions/Log/example' + 400: + $ref: '#/responses/error' + description: Invalid log level or pagination parameter + x-request: + query-params: + log-level: abc + post: + summary: Log a message + description: Log a message + parameters: + - name: log + in: body + required: true + schema: + $ref: '#/definitions/Log' + responses: + 201: + description: Log message successfully created + x-expect: + no-body: true + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: + level: error + /authenticate: + post: + security: [] + summary: Return a JWT for the provided user. 
This is required for all other routes + description: DESCRIPTION + parameters: + - name: auth + in: body + description: auth object + required: true + schema: + $ref: '#/definitions/Auth' + responses: + 200: + description: The JWT for the user + schema: + type: string + x-no-api-key: true + x-request: + body: + username: "${stash['web-username']}" + password: "${stash['web-password']}" + 401: + $ref: '#/responses/error' + description: No credentials provided or invalid credentials + x-request: + body: {} +definitions: + Series: + type: object + properties: + id: + type: object + description: | + All of the ids mapped to a certain Series, at least one of these is required. + properties: + tvdb: + type: integer + format: int32 + minimum: 1 + description: This is the ID from thetvdb.com + tvmaze: + type: integer + format: int32 + minimum: 1 + description: This is the ID from tvmaze.com + imdb: + type: string + description: This is the ID from imdb.com + example: + tvdb: 1234 + tvmaze: 5678 + imdb: tt90123 + title: + type: string + description: | + This is the title of the series, this should be in the main + language the series was originally created in. If other + titles exist for other languages they will be part of the "akas" field. + indexer: + type: string + description: "Indexer name" + example: tvdb + network: + type: string + example: CBS + type: + type: string + example: Scripted + status: + type: string + enum: [Continuing, Ended] + airs: + type: string + description: "Air time" + example: "Thursday 8:00 PM" + language: + type: string + description: Language code + example: en + showType: + type: string + enum: [series, anime, sports] + akas: + type: object + description: Also known as + additionalProperties: + type: string + year: + type: object + properties: + start: + type: integer + format: int32 + minimum: 1900 + maximum: 2200 + description: Starting year + end: + type: integer + format: int32 + minimum: 1900 + maximum: 2200 + description: End year. 
Available in detailed view + nextAirDate: + type: string + format: date-time + description: Next episode air date + runtime: + type: integer + minimum: 1 + format: int32 + description: Episodes runtime in minutes + genres: + type: array + items: + type: string + example: [Drama, Romance] + rating: + type: object + properties: + imdb: + type: object + properties: + stars: + type: string + description: "IMDB's star rating from 0 to 10" + votes: + type: integer + format: int32 + minimum: 1 + description: "Total number of votes" + example: + imdb: + stars: '8.3' + votes: 558507 + classification: + type: string + enum: ['TV-Y', 'TV-Y7', 'TV-G', 'TV-PG', 'TV-14', 'TV-MA'] + description: TV Parental Guidelines + cache: + type: object + description: Image cache locations + properties: + banner: + type: string + example: "/home/user/Medusa/cache/images/301824.banner.jpg" + poster: + type: string + example: /home/user/Medusa/cache/images/301824.poster.jpg + countries: + type: array + items: + type: string + description: Country codes + config: + type: object + description: Series configuration and preferences + properties: + location: + type: string + description: Series home folder + example: "/library/My Series" + qualities: + type: object + description: Qualities settings + properties: + allowed: + type: array + items: + type: string + prefered: + type: array + items: + type: string + paused: + type: boolean + description: Whether series is paused + airByDate: + type: boolean + description: Whether episodes are indexed by air date + subtitlesEnabled: + type: boolean + description: Whether subtitles download is enabled + dvdOrder: + type: boolean + description: Whether episode numbering follows DVD order instead of air date + flattenFolders: + type: boolean + description: Whether episodes are stored in a flatten folder structure + scene: + type: boolean + defaultEpisodeStatus: + type: string + description: Initial status for newly added episodes + aliases: + type: array + description: Local aliases for the series + items: + type: string + example: [MySeries, MS] + release: + type: object + description: Release configuration + properties: + blacklist: + type: array + description: Blacklisted release words + items: + type: string + whitelist: + type: array + description: Whitelisted release words + items: + type: string + ignoredWords: + type: array + items: + type: string + requiredWords: + type: array + description: Required release words + items: + type: string + seasons: + type: array + description: Episodes grouped by season. Available in detailed view. + items: + type: array + description: Episodes for the given season + items: + $ref: '#/definitions/Episode' + episodeCount: + type: integer + format: int32 + minimum: 0 + maximum: 10000 + description: Total number of episodes. 
Available in detailed view + Episode: + type: object + properties: + identifier: + type: string + example: s03e07 + id: + type: object + properties: + tvdb: + type: integer + format: int32 + description: This is the ID from thetvdb.com + tvmaze: + type: integer + format: int32 + description: This is the ID from tvmaze.com + imdb: + type: string + description: This is the ID from imdb.com + example: + tvdb: 2345 + tvmaze: 6789 + imdb: tt0123 + season: + type: integer + format: int32 + minimum: 0 + maximum: 1000 + episode: + type: integer + format: int32 + minimum: 0 + maximum: 10000 + absoluteNumber: + type: integer + format: int32 + minimum: 1 + maximum: 10000 + airDate: + type: string + format: date-time + title: + type: string + description: Episode title + description: + type: string + description: Episode plot + content: + type: array + description: Additional content + items: + type: string + enum: [NFO, thumbnail] + subtitles: + type: array + items: + type: string + description: Available subtitle languages + status: + type: string + description: Episode status + release: + type: object + description: The release details + properties: + name: + type: string + description: Original release name + group: + type: string + description: Original release group + proper: + type: boolean + description: Whether the release is proper + version: + type: integer + format: int32 + minimum: 0 + maximum: 10 + description: Episode version (common in animes) + scene: + type: object + properties: + season: + type: integer + format: int32 + minimum: 0 + maximum: 1000 + episode: + type: integer + format: int32 + minimum: 0 + maximum: 10000 + absoluteNumber: + type: integer + format: int32 + minimum: 1 + maximum: 10000 + file: + type: object + description: The episode file details + properties: + location: + type: string + description: The episode file location + example: '/library/My Series/Season 10/My Series - S03E07 - Super Episode.avi' + size: + type: integer + format: int64 + minimum: 1 + description: The file size in bytes + statistics: + type: object + description: Episode statistics. Available only in detailed view. 
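Several of the heavier blocks in these definitions (a Series' seasons and episodeCount, an Episode's statistics) are only returned when the detailed query parameter is set. A minimal sketch of opting into the detailed view, using the example ids from this spec (illustrative, not part of the diff):

```python
# Sketch: request the detailed representations described above.
import requests

BASE = 'http://localhost:8081/api/v2'
API_KEY = '1234567890ABCDEF1234567890ABCDEF'     # placeholder key
AUTH = {'x-api-key': API_KEY}

# Detailed series view adds 'seasons' and 'episodeCount'
series = requests.get(BASE + '/series/tvdb301824',
                      params=dict(AUTH, detailed='true')).json()
print(series.get('episodeCount'))

# Detailed episode view adds the 'statistics' block
episode = requests.get(BASE + '/series/tvdb301824/episode/s01e01',
                       params=dict(AUTH, detailed='true')).json()
print(episode.get('statistics', {}).get('subtitleSearch'))
```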
+ properties: + subtitleSearch: + type: object + description: subtitle search statistics + properties: + last: + type: string + format: date-time + description: Last subtitle search timestamp + count: + type: integer + format: int32 + minimum: 0 + description: search count + wantedQualities: + type: array + items: + type: string + relatedEpisodes: + type: array + example: [s03e08, s03e09] + items: + type: string + Auth: + description: Auth object + properties: + username: + type: string + password: + type: string + exp: + type: integer + minimum: 3600 + maximum: 31536000 + required: + - username + - password + Error: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + fields: + type: string + Operation: + type: object + properties: + type: + type: string + creation: + type: string + format: date-time + Config: + type: object + properties: + wikiUrl: + type: string + themeName: + type: string + namingForceFolders: + type: boolean + databaseVersion: + type: object + properties: + major: + type: integer + minor: + type: integer + layout: + type: object + properties: + show: + type: object + properties: + specials: + type: boolean + allSeasons: + type: boolean + home: + type: string + history: + type: string + schedule: + type: string + trimZero: + type: boolean + configFile: + type: string + animeSplitHome: + type: boolean + sortArticle: + type: boolean + sourceUrl: + type: string + fanartBackgroundOpacity: + type: number + emby: + type: object + properties: + enabled: + type: boolean + logDir: + type: string + posterSortby: + type: string + subtitles: + type: object + properties: + enabled: + type: boolean + fuzzyDating: + type: boolean + timePreset: + type: string + kodi: + type: object + properties: + enabled: + type: boolean + dbFilename: + type: string + pythonVersion: + type: string + downloadUrl: + type: string + nzb: + type: object + properties: + username: + type: string + host: + type: string + password: + type: string + enabled: + type: boolean + priority: + type: integer + release: + type: string + posterSortDir: + type: string + locale: + type: string + webRoot: + type: string + torrents: + type: object + properties: + username: + type: string + seedTime: + type: integer + rpcurl: + type: string + authType: + type: string + paused: + type: boolean + host: + type: string + path: + type: string + password: + type: string + verifySSL: + type: boolean + highBandwidth: + type: boolean + enabled: + type: boolean + label: + type: string + labelAnime: + type: string + method: + type: string + enum: ["blackhole", "utorrent", "transmission", "deluge", "deluged", "download_station", "rtorrent", "qbittorrent", "mlnet"] + Log: + type: object + properties: + timestamp: + type: string + format: date-time + level: + type: string + enum: [ERROR, WARNING, INFO, DEBUG, DB] + commit: + type: string + thread: + type: string + message: + type: string + threadId: + type: number + extra: + type: string + traceback: + type: array + items: + type: string + args: + type: array + items: + type: object + kwargs: + type: array + items: + type: object + example: + commit: '6a1db77' + level: INFO + message: Waiting for the DAILYSEARCHER thread to exit + thread: EVENT-QUEUE + timestamp: '2017-03-19 08:03:23' + AliasSource: + type: object + properties: + id: + type: string + lastRefresh: + type: integer + description: Last refresh in seconds since Epoch + Alias: + type: object + properties: + id: + type: integer + minimum: 1 + series: + type: string + name: + type: 
string + season: + type: integer + minimum: 0 + type: + type: string + enum: [local] + example: + series: tvdb301824 + name: TheBig + type: local + Metadata: + type: object + properties: + title: + type: string + description: Video title + path: + type: string + description: Video full path + duration: + type: string + description: Video duration + example: '0:19:39.208000' + size: + type: integer + format: int64 + minimum: 0 + description: Video file size in bytes + overall_bit_rate: + type: integer + format: int32 + minimum: 0 + description: Video overall bitrate + video: + type: array + items: + type: object + properties: + number: + type: integer + format: int32 + minimum: 0 + description: Track number + name: + type: string + description: Track name + language: + type: string + description: Track language + duration: + type: string + description: Track duration + size: + type: integer + format: int64 + minimum: 0 + description: Video stream size in bytes + width: + type: integer + format: int32 + minimum: 0 + description: Video width size (pixels) + height: + type: integer + format: int32 + minimum: 0 + description: Video height size (pixels) + scan_type: + type: string + enum: [Progressive, Interlaced] + description: Video scan type + aspect_ratio: + type: number + minimum: 0 + description: Video aspect ratio + pixel_aspect_ratio: + type: number + minimum: 0 + description: Pixel aspect ratio + resolution: + type: string + description: Video resolution + enum: + - "480i" + - "720i" + - "1080i" + - "2160i" + - "4320i" + - "480p" + - "720p" + - "1080p" + - "2160p" + - "4320p" + - "240i" + - "288i" + - "360i" + - "576i" + - "240p" + - "288p" + - "360p" + - "576p" + frame_rate: + type: number + minimum: 0 + description: Video frame rate (frames per second) + bit_depth: + type: integer + format: int32 + minimum: 0 + description: Video bit depth + bit_rate: + type: integer + format: int32 + minimum: 0 + description: Video bit rate + codec: + type: string + description: Video codec + enum: + - h263 + - h264 + - h265 + - Mpeg1 + - Mpeg2 + - MsMpeg4v2 + - MsMpeg4v3 + - Mpeg4 + - XviD + - DivX + - Jpeg + - Wmv1 + - Wmv2 + - Wmv3 + - VC1 + - QuickTime + - VP6 + - VP7 + - VP9 + profile: + type: string + description: Video codec profile + encoder: + type: string + description: Video encoder + media_type: + type: string + description: Video media type + forced: + type: boolean + description: Whether this track is forced + default: + type: boolean + description: Whether this track is default + audio: + type: array + items: + type: object + properties: + number: + type: integer + format: int32 + minimum: 0 + description: Track number + name: + type: string + description: Track name + language: + type: string + description: Track language + duration: + type: string + description: Track duration + size: + type: integer + format: int64 + minimum: 0 + description: Audio stream size in bytes + codec: + type: string + description: Audio codec + enum: + - AC3 + - EAC3 + - TrueHD + - DolbyAtmos + - DTS + - DTS-HD + - AAC + - FLAC + - PCM + - MP2 + - MP3 + - Vorbis + - Opus + - WMAv1 + - WMAv2 + - WMAPro + profile: + type: string + description: Audio codec profile + enum: + - Main + - LC + channels_count: + type: integer + format: int32 + minimum: 0 + description: Number of channels + channel_positions: + type: string + description: Channel positions + channels: + type: string + description: Audio channels + enum: + - "1.0" + - "2.0" + - "5.1" + - "7.1" + bit_depth: + type: integer + format: int32 + minimum: 
0 + description: Audio bit depth + bit_rate: + type: integer + format: int32 + minimum: 0 + description: Audio bit rate + bit_rate_mode: + type: string + enum: [Constant, Variable] + sampling_rate: + type: integer + format: int32 + description: Audio sampling rate + compression: + type: string + enum: [Lossy, Lossless] + description: Audio compression + forced: + type: boolean + description: Whether this track is forced + default: + type: boolean + description: Whether this track is default + subtitle: + type: array + items: + type: object + properties: + number: + type: integer + format: int32 + minimum: 0 + description: Track number + name: + type: string + description: Track name + language: + type: string + description: Track language + hearing_impaired: + type: boolean + description: Whether this track is for hearing impaired people + format: + type: string + description: Subtitles format + enum: + - PGS + - VobSub + - SubRip + - SubStationAlpha + - AdvancedSubStationAlpha + - Tx3g + encoding: + type: string + description: Subtitles encoding + enum: + - 'utf-8' + forced: + type: boolean + description: Whether this track is forced + default: + type: boolean + description: Whether this track is default +parameters: + detailed: + name: detailed + in: query + required: false + description: Whether response should contain detailed information + type: boolean + page: + name: page + in: query + required: false + description: The page to be returned. Default value is 1 + type: integer + format: int32 + limit: + name: limit + in: query + required: false + description: Maximum number of items per page. Default value is 20. Max value is 1000 + type: integer + format: int32 + sort: + name: sort + in: query + required: false + description: The field (or list of fields) to be used while sorting. Use + or - prefix to define sorting order. + type: string + series-id: + name: series-id + in: path + required: true + description: The series id to retrieve. E.g. tvdb1234 + x-example: tvdb301824 + type: string + episode-id: + name: episode-id + in: path + required: true + description: The episode id to retrieve. E.g. 
s02e03, e34 or 2016-12-31 + x-example: s01e01 + type: string + alias-id: + name: alias-id + in: path + required: true + description: The alias id to retrieve + x-example: 123456 + type: integer + format: int32 + alias-source-id: + name: alias-source-id + in: path + required: true + description: The alias-source id to retrieve + type: string + enum: [local, xem, anidb] + alias-source-with-all-id: + name: alias-source-id + in: path + required: true + description: The alias-source id to retrieve + type: string + enum: [all, local, xem, anidb] + asset-id: + name: asset-id + in: path + required: true + description: The asset to retrieve + type: string + enum: + - banner + - bannerThumb + - fanart + - poster + - posterThumb + - network + - small + config-id: + name: config-id + in: path + required: true + description: The configuration to retrieve + type: string + enum: + - main + log-level: + name: level + in: query + required: false + description: The log level + type: string + enum: [ERROR, WARNING, INFO, DEBUG, DB] +responses: + pagination: + description: Pagination response + headers: + X-Pagination-Page: + type: integer + format: int32 + description: The page number + X-Pagination-Limit: + type: integer + format: int32 + description: The pagination limit + X-Pagination-Count: + type: integer + format: int32 + description: The total items count + Link: + type: string + description: "The pagination links: next, last, first and previous" + pagination_stream: + description: Pagination response + headers: + X-Pagination-Page: + type: integer + format: int32 + description: The page number + X-Pagination-Limit: + type: integer + format: int32 + description: The pagination limit + Link: + type: string + description: "The pagination links: next, last, first and previous" + error: + description: Unexpected error + schema: + $ref: '#/definitions/Error' diff --git a/dredd.yml b/dredd/dredd.yml similarity index 76% rename from dredd.yml rename to dredd/dredd.yml index 9aec524c84..6165991f57 100644 --- a/dredd.yml +++ b/dredd/dredd.yml @@ -1,15 +1,12 @@ dry-run: null -hookfiles: ./dredd_hook.py +hookfiles: dredd/dredd_hook.py language: python sandbox: false -server: python ./start.py -server-wait: 20 +server: python dredd/dredd_hook.py +server-wait: 10 init: false -custom: - apiaryApiKey: '' names: false only: [] -reporter: apiary output: [] header: [] sorted: false @@ -30,6 +27,6 @@ hooks-worker-term-timeout: 5000 hooks-worker-term-retry: 500 hooks-worker-handler-host: localhost hooks-worker-handler-port: 61321 -config: ./dredd.yml -blueprint: api-description.yml +config: dredd.yml +blueprint: dredd/api-description.yml endpoint: 'http://localhost:8081' diff --git a/dredd/dredd_hook.py b/dredd/dredd_hook.py new file mode 100644 index 0000000000..369982a7d7 --- /dev/null +++ b/dredd/dredd_hook.py @@ -0,0 +1,173 @@ +"""Dredd hook.""" +import ConfigParser +import json +import urlparse +from collections import Mapping +from urllib import urlencode + +import dredd_hooks as hooks + +from six import string_types +import yaml + + +api_description = None + +stash = { + 'web-username': 'testuser', + 'web-password': 'testpass', + 'api-key': '1234567890ABCDEF1234567890ABCDEF', +} + + +@hooks.before_all +def load_api_description(transactions): + """Load api description.""" + global api_description + with open(transactions[0]['origin']['filename'], 'r') as stream: + api_description = yaml.safe_load(stream) + + +@hooks.before_each +def configure_transaction(transaction): + """Configure request based on x- property 
values for each response code.""" + base_path = api_description['basePath'] + + path = transaction['origin']['resourceName'] + method = transaction['request']['method'] + status_code = int(transaction['expected']['statusCode']) + response = api_description['paths'][path[len(base_path):]][method.lower()]['responses'][status_code] + + # Whether we should skip this test + transaction['skip'] = response.get('x-disabled', False) + + # Add api-key + if not response.get('x-no-api-key', False): + transaction['request']['headers']['x-api-key'] = stash['api-key'] + + # If no body is expected, skip body validation + expected = transaction['expected'] + expected_content_type = expected['headers'].get('Content-Type') + expected_status_code = int(expected['statusCode']) + if expected_status_code == 204 or response.get('x-expect', {}).get('no-body', False): + del expected['body'] + if expected_content_type: + print('Skipping content-type validation for {name!r}.'.format(name=transaction['name'])) + del expected['headers']['Content-Type'] + + # Keep stash configuration in the transaction to be executed in an after hook + transaction['x-stash'] = response.get('x-stash') or {} + + # Change request based on x-request configuration + url = transaction['fullPath'] + parsed_url = urlparse.urlparse(url) + parsed_params = urlparse.parse_qs(parsed_url.query) + parsed_path = parsed_url.path + + request = response.get('x-request', {}) + body = request.get('body') + if body is not None: + transaction['request']['body'] = json.dumps(evaluate(body)) + + path_params = request.get('path-params') + if path_params: + params = {} + resource_parts = path.split('/') + for i, part in enumerate(url.split('/')): + if not part: + continue + + resource_part = resource_parts[i] + if resource_part[0] == '{' and resource_part[-1] == '}': + params[resource_part[1:-1]] = part + + params.update(path_params) + new_url = path + for name, value in params.items(): + value = evaluate(value) + new_url = new_url.replace('{' + name + '}', str(value)) + + replace_url(transaction, new_url) + + query_params = request.get('query-params') + if query_params: + for name, value in query_params.items(): + query_params[name] = evaluate(value) + + query_params = dict(parsed_params, **query_params) + new_url = parsed_path if not query_params else parsed_path + '?' 
+ urlencode(query_params) + + replace_url(transaction, new_url) + + +@hooks.after_each +def stash_values(transaction): + """Stash values.""" + if 'real' in transaction and 'bodySchema' in transaction['expected']: + body = json.loads(transaction['real']['body']) if transaction['real']['body'] else None + headers = transaction['real']['headers'] + for name, value in transaction['x-stash'].items(): + value = evaluate(value, {'body': body, 'headers': headers}) + print('Stashing {name}: {value!r}'.format(name=name, value=value)) + stash[name] = value + + +def replace_url(transaction, new_url): + """Replace with a new URL.""" + transaction['fullPath'] = new_url + transaction['request']['uri'] = new_url + transaction['id'] = transaction['request']['method'] + ' ' + new_url + + +def evaluate(expression, context=None): + """Evaluate the expression value.""" + context = context or {'stash': stash} + if isinstance(expression, string_types) and expression.startswith('${') and expression.endswith('}'): + value = eval(expression[2:-1], context) + print('Expression {expression} evaluated to {value!r}'.format(expression=expression, value=value)) + return value + elif isinstance(expression, Mapping): + for key, value in expression.items(): + expression[key] = evaluate(value, context=context) + elif isinstance(expression, list): + for i, value in enumerate(expression): + expression[i] = evaluate(value, context=context) + + return expression + + +def start(): + """Start application.""" + import os + import shutil + import sys + + current_dir = os.path.dirname(__file__) + app_dir = os.path.abspath(os.path.join(current_dir, '..')) + data_dir = os.path.abspath(os.path.join(current_dir, 'data')) + if os.path.isdir(data_dir): + shutil.rmtree(data_dir) + args = [ + '--datadir={0}'.format(data_dir), + '--nolaunch', + ] + + os.makedirs(data_dir) + os.chdir(data_dir) + config = ConfigParser.RawConfigParser() + config.read('config.ini') + config.add_section('General') + config.set('General', 'web_username', stash['web-username']) + config.set('General', 'web_password', stash['web-password']) + config.set('General', 'api_key', stash['api-key']) + with open('config.ini', 'wb') as configfile: + config.write(configfile) + + sys.path.insert(1, app_dir) + + from medusa.__main__ import Application + application = Application() + application.start(args) + +if __name__ == '__main__': + start() diff --git a/dredd_hook.py b/dredd_hook.py deleted file mode 100644 index f5b3103827..0000000000 --- a/dredd_hook.py +++ /dev/null @@ -1,26 +0,0 @@ -import ConfigParser -import dredd_hooks as hooks - -web_username = 'test_username' -web_password = 'test_password' -api_key = 'test_api_key' - -@hooks.before_all -def set_auth(transaction): - config = ConfigParser.RawConfigParser() - config.read(r'config.ini') - config.set('General', 'web_username', web_username) - config.set('General', 'web_password', web_password) - config.set('General', 'api_key', api_key) - with open('config.ini', 'wb') as configfile: - config.write(configfile) - -@hooks.before_each -def add_api_key(transaction): - transaction['request']['headers']['x-api-key'] = api_key - -@hooks.before('/authenticate > POST') -def add_auth(transaction): - del transaction['request']['headers']['x-api-key'] - transaction['request']['body']['username'] = web_username - transaction['request']['body']['password'] = web_password diff --git a/lib/github/__init__.py b/lib/github/__init__.py index 4bfd381efe..7b3c46da84 100644 --- a/lib/github/__init__.py +++ b/lib/github/__init__.py @@ -35,7 
+35,7 @@ import logging from MainClass import Github -from GithubException import GithubException, BadCredentialsException, UnknownObjectException, BadUserAgentException, RateLimitExceededException, BadAttributeException +from GithubException import GithubException, BadCredentialsException, UnknownObjectException, BadUserAgentException, RateLimitExceededException, BadAttributeException, TwoFactorException from InputFileContent import InputFileContent from InputGitAuthor import InputGitAuthor from InputGitTreeElement import InputGitTreeElement diff --git a/lib/guessit/api.py b/lib/guessit/api.py index 900f6965e1..62ce10fcf3 100644 --- a/lib/guessit/api.py +++ b/lib/guessit/api.py @@ -100,7 +100,7 @@ def guessit(self, string, options=None): :rtype: """ try: - options = parse_options(options) + options = parse_options(options, True) result_decode = False result_encode = False diff --git a/lib/guessit/options.py b/lib/guessit/options.py index c0db746047..8197590263 100644 --- a/lib/guessit/options.py +++ b/lib/guessit/options.py @@ -81,11 +81,14 @@ def build_argument_parser(): return opts -def parse_options(options=None): +def parse_options(options=None, api=False): """ Parse given option string + :param options: :type options: + :param api + :type boolean :return: :rtype: """ @@ -93,7 +96,10 @@ def parse_options(options=None): args = shlex.split(options) options = vars(argument_parser.parse_args(args)) elif options is None: - options = vars(argument_parser.parse_args()) + if api: + options = {} + else: + options = vars(argument_parser.parse_args()) elif not isinstance(options, dict): options = vars(argument_parser.parse_args(options)) return options diff --git a/lib/guessit/rules/common/words.py b/lib/guessit/rules/common/words.py index cba4e52ce2..8882acb3dd 100644 --- a/lib/guessit/rules/common/words.py +++ b/lib/guessit/rules/common/words.py @@ -53,6 +53,8 @@ def iter_words(string): 'wa', 'ga', 'ao', # spanish words 'la', 'el', 'del', 'por', 'mar', 'al', + # italian words + 'un', # other 'ind', 'arw', 'ts', 'ii', 'bin', 'chan', 'ss', 'san', 'oss', 'iii', 'vi', 'ben', 'da', 'lt', 'ch', 'sr', 'ps', 'cx', 'vo', diff --git a/lib/guessit/rules/properties/audio_codec.py b/lib/guessit/rules/properties/audio_codec.py index e1e712c809..79a6e8eb15 100644 --- a/lib/guessit/rules/properties/audio_codec.py +++ b/lib/guessit/rules/properties/audio_codec.py @@ -58,12 +58,15 @@ def audio_codec_priority(match1, match2): rebulk.regex(r'(7[\W_][01](?:ch)?)(?:[^\d]|$)', value='7.1', children=True) rebulk.regex(r'(5[\W_][01](?:ch)?)(?:[^\d]|$)', value='5.1', children=True) rebulk.regex(r'(2[\W_]0(?:ch)?)(?:[^\d]|$)', value='2.0', children=True) + rebulk.regex('7[01]', value='7.1', validator=seps_after, tags='weak-audio_channels') + rebulk.regex('5[01]', value='5.1', validator=seps_after, tags='weak-audio_channels') + rebulk.string('20', value='2.0', validator=seps_after, tags='weak-audio_channels') rebulk.string('7ch', '8ch', value='7.1') rebulk.string('5ch', '6ch', value='5.1') rebulk.string('2ch', 'stereo', value='2.0') rebulk.string('1ch', 'mono', value='1.0') - rebulk.rules(DtsRule, AacRule, Ac3Rule, AudioValidatorRule, HqConflictRule) + rebulk.rules(DtsRule, AacRule, Ac3Rule, AudioValidatorRule, HqConflictRule, AudioChannelsValidatorRule) return rebulk @@ -162,3 +165,22 @@ def when(self, matches, context): if hq_other: return hq_other + + +class AudioChannelsValidatorRule(Rule): + """ + Remove audio_channel if no audio codec as previous match. 
+ """ + priority = 128 + consequence = RemoveMatch + + def when(self, matches, context): + ret = [] + + for audio_channel in matches.tagged('weak-audio_channels'): + valid_before = matches.range(audio_channel.start - 1, audio_channel.start, + lambda match: match.name == 'audio_codec') + if not valid_before: + ret.append(audio_channel) + + return ret diff --git a/lib/guessit/rules/properties/edition.py b/lib/guessit/rules/properties/edition.py index 429ba8d35f..a470f88ac8 100644 --- a/lib/guessit/rules/properties/edition.py +++ b/lib/guessit/rules/properties/edition.py @@ -24,8 +24,12 @@ def edition(): conflict_solver=lambda match, other: other if other.name == 'episode_details' and other.value == 'Special' else '__default__') + rebulk.string('SE', value='Special Edition', tags='has-neighbor') rebulk.regex('criterion-edition', 'edition-criterion', value='Criterion Edition') rebulk.regex('deluxe', 'deluxe-edition', 'edition-deluxe', value='Deluxe Edition') - rebulk.regex('director\'?s?-cut', 'director\'?s?-cut-edition', 'edition-director\'?s?-cut', value='Director\'s cut') + rebulk.regex('limited', 'limited-edition', value='Limited Edition') + rebulk.regex(r'theatrical-cut', r'theatrical-edition', r'theatrical', value='Theatrical Edition') + rebulk.regex(r"director'?s?-cut", r"director'?s?-cut-edition", r"edition-director'?s?-cut", 'DC', + value="Director's cut") return rebulk diff --git a/lib/guessit/rules/properties/episodes.py b/lib/guessit/rules/properties/episodes.py index 22b8465daf..9cd257d11f 100644 --- a/lib/guessit/rules/properties/episodes.py +++ b/lib/guessit/rules/properties/episodes.py @@ -28,7 +28,7 @@ def episodes(): # pylint: disable=too-many-branches,too-many-statements,too-many-locals rebulk = Rebulk() rebulk.regex_defaults(flags=re.IGNORECASE).string_defaults(ignore_case=True) - rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator']) + rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator', 'episodeMarker', 'seasonMarker']) def episodes_season_chain_breaker(matches): """ @@ -58,10 +58,12 @@ def season_episode_conflict_solver(match, other): :return: """ if match.name == 'episode' and other.name in \ - ['screen_size', 'video_codec', 'audio_codec', 'audio_channels', 'container', 'date', 'year']: + ['screen_size', 'video_codec', 'audio_codec', 'audio_channels', 'container', 'date', 'year'] \ + and 'weak-audio_channels' not in other.tags: return match if match.name == 'season' and other.name in \ - ['screen_size', 'video_codec', 'audio_codec', 'audio_channels', 'container', 'date']: + ['screen_size', 'video_codec', 'audio_codec', 'audio_channels', 'container', 'date'] \ + and 'weak-audio_channels' not in other.tags: return match if match.name in ['season', 'episode'] and other.name in ['season', 'episode'] \ and match.initiator != other.initiator: @@ -75,8 +77,10 @@ def season_episode_conflict_solver(match, other): season_episode_seps.extend(seps) season_episode_seps.extend(['x', 'X', 'e', 'E']) - season_words = ['season', 'saison', 'serie', 'seasons', 'saisons', 'series'] - episode_words = ['episode', 'episodes', 'eps', 'ep'] + season_words = ['season', 'saison', 'seizoen', 'serie', 'seasons', 'saisons', 'series', + 'tem', 'temp', 'temporada', 'temporadas', 'stagione'] + episode_words = ['episode', 'episodes', 'eps', 'ep', 'episodio', + 'episodios', 'capitulo', 'capitulos'] of_words = ['of', 'sur'] all_words = ['All'] season_markers = ["S"] @@ -140,8 +144,8 @@ def is_consecutive(property_name): validate_all=True, validator={'__parent__': 
ordering_validator},
                 conflict_solver=season_episode_conflict_solver) \
-        .regex(build_or_pattern(season_markers) + r'(?P<season>\d+)@?' +
-               build_or_pattern(episode_markers) + r'@?(?P<episode>\d+)',
+        .regex(build_or_pattern(season_markers, name='seasonMarker') + r'(?P<season>\d+)@?' +
+               build_or_pattern(episode_markers, name='episodeMarker') + r'@?(?P<episode>\d+)',
                validate_all=True,
                validator={'__parent__': seps_before}).repeater('+') \
        .regex(build_or_pattern(episode_markers + discrete_separators + range_separators,
@@ -150,13 +154,13 @@ def is_consecutive(property_name):
               r'(?P<episode>\d+)').repeater('*') \
        .chain() \
        .regex(r'(?P<season>\d+)@?' +
-               build_or_pattern(season_ep_markers) +
+               build_or_pattern(season_ep_markers, name='episodeMarker') +
               r'@?(?P<episode>\d+)',
               validate_all=True,
               validator={'__parent__': seps_before}) \
        .chain() \
        .regex(r'(?P<season>\d+)@?' +
-               build_or_pattern(season_ep_markers) +
+               build_or_pattern(season_ep_markers, name='episodeMarker') +
               r'@?(?P<episode>\d+)',
               validate_all=True,
               validator={'__parent__': seps_before}) \
@@ -165,7 +169,7 @@ def is_consecutive(property_name):
                               escape=True) +
               r'(?P<episode>\d+)').repeater('*') \
        .chain() \
-        .regex(build_or_pattern(season_markers) + r'(?P<season>\d+)',
+        .regex(build_or_pattern(season_markers, name='seasonMarker') + r'(?P<season>\d+)',
               validate_all=True,
               validator={'__parent__': seps_before}) \
        .regex(build_or_pattern(season_markers + discrete_separators + range_separators,
@@ -178,9 +182,6 @@ def is_consecutive(property_name):
     rebulk.string(episode_detail, value=episode_detail, name='episode_details')
     rebulk.regex(r'Extras?', name='episode_details', value='Extras')
 
-    rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator'],
-                    validate_all=True, validator={'__parent__': seps_surround}, children=True, private_parent=True)
-
     def validate_roman(match):
         """
         Validate a roman match if surrounded by separators
@@ -193,8 +194,8 @@ def validate_roman(match):
             return True
         return seps_surround(match)
 
-    rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator'], validate_all=True,
-                    validator={'__parent__': seps_surround}, children=True, private_parent=True,
+    rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator', 'episodeMarker', 'seasonMarker'],
+                    validate_all=True, validator={'__parent__': seps_surround}, children=True, private_parent=True,
                     conflict_solver=season_episode_conflict_solver)
 
     rebulk.chain(abbreviations=[alt_dash],
@@ -203,19 +204,19 @@ def validate_roman(match):
                            'season': validate_roman,
                            'count': validate_roman}) \
        .defaults(validator=None) \
-        .regex(build_or_pattern(season_words) + '@?(?P<season>' + numeral + ')') \
+        .regex(build_or_pattern(season_words, name='seasonMarker') + '@?(?P<season>' + numeral + ')') \
        .regex(r'' + build_or_pattern(of_words) + '@?(?P<count>' + numeral + ')').repeater('?') \
-        .regex(r'@?(?P<seasonSeparator>' +
-               build_or_pattern(range_separators + discrete_separators + ['@'], escape=True) +
-               r')@?(?P<season>\d+)').repeater('*')
+        .regex(r'@?' + build_or_pattern(range_separators + discrete_separators + ['@'],
+                                        name='seasonSeparator', escape=True) +
+               r'@?(?P<season>\d+)').repeater('*')
 
-    rebulk.regex(build_or_pattern(episode_words) + r'-?(?P<episode>\d+)' +
+    rebulk.regex(build_or_pattern(episode_words, name='episodeMarker') + r'-?(?P<episode>\d+)' +
                 r'(?:v(?P<version>\d+))?' +
                 r'(?:-?' + build_or_pattern(of_words) + r'-?(?P<count>\d+))?',  # Episode 4
-                 abbreviations=[dash], formatter=int,
+                 abbreviations=[dash], formatter={'episode': int, 'version': int, 'count': int},
                  disabled=lambda context: context.get('type') == 'episode')
 
-    rebulk.regex(build_or_pattern(episode_words) + r'-?(?P<episode>' + numeral + ')' +
+    rebulk.regex(build_or_pattern(episode_words, name='episodeMarker') + r'-?(?P<episode>' + numeral + ')' +
                  r'(?:v(?P<version>\d+))?' +
                  r'(?:-?' + build_or_pattern(of_words) + r'-?(?P<count>\d+))?',  # Episode 4
                  abbreviations=[dash],
@@ -263,7 +264,7 @@ def validate_roman(match):
     # TODO: Enhance rebulk for validator to be used globally (season_episode_validator)
     rebulk.chain(formatter={'episode': int, 'version': int}) \
         .defaults(validator=None) \
-        .regex(r'e(?P<episode>\d{1,4})') \
+        .regex(r'(?P<episodeMarker>e)(?P<episode>\d{1,4})') \
         .regex(r'v(?P<version>\d+)').repeater('?') \
         .regex(r'(?P<episodeSeparator>e|x|-)(?P<episode>\d{1,4})').repeater('*')
 
@@ -274,6 +275,14 @@ def validate_roman(match):
         .regex(r'v(?P<version>\d+)').repeater('?') \
         .regex(r'(?P<episodeSeparator>ep|e|x|-)(?P<episode>\d{1,4})').repeater('*')
 
+    # cap 112, cap 112_114
+    rebulk.chain(abbreviations=[dash],
+                 tags=['see-pattern'],
+                 formatter={'season': int, 'episode': int}) \
+        .defaults(validator=None) \
+        .regex(r'(?P<seasonMarker>cap)-?(?P<season>\d{1,2})(?P<episode>\d{2})') \
+        .regex(r'(?P<episodeSeparator>-)(?P<season>\d{1,2})(?P<episode>\d{2})').repeater('?')
+
     # 102, 0102
     rebulk.chain(tags=['bonus-conflict', 'weak-movie', 'weak-episode', 'weak-duplicate'],
                  formatter={'season': int, 'episode': int, 'version': int},
@@ -296,7 +305,8 @@ def validate_roman(match):
 
     rebulk.regex(r'Minisodes?', name='episode_format', value="Minisode")
 
-    rebulk.rules(EpisodeNumberSeparatorRange(range_separators),
+    rebulk.rules(RemoveInvalidSeason, RemoveInvalidEpisode,
+                 SeePatternRange(range_separators + ['_']), EpisodeNumberSeparatorRange(range_separators),
                  SeasonSeparatorRange(range_separators), RemoveWeakIfMovie, RemoveWeakIfSxxExx, RemoveWeakDuplicate,
                  EpisodeDetailValidator, RemoveDetachedEpisodeNumber, VersionValidator, CountValidator,
                  EpisodeSingleDigitValidator)
@@ -330,6 +340,41 @@ def when(self, matches, context):
 
         return to_remove, episode_count, season_count
 
 
+class SeePatternRange(Rule):
+    """
+    Create matches for episode range for SEE pattern. E.g.: Cap.102_104
+    """
+    priority = 128
+    consequence = [RemoveMatch, AppendMatch]
+
+    def __init__(self, range_separators):
+        super(SeePatternRange, self).__init__()
+        self.range_separators = range_separators
+
+    def when(self, matches, context):
+        to_remove = []
+        to_append = []
+
+        for separator in matches.tagged('see-pattern', lambda m: m.name == 'episodeSeparator'):
+            previous_match = matches.previous(separator, lambda m: m.name == 'episode' and 'see-pattern' in m.tags, 0)
+            next_match = matches.next(separator, lambda m: m.name == 'season' and 'see-pattern' in m.tags, 0)
+            if not next_match:
+                continue
+
+            next_match = matches.next(next_match, lambda m: m.name == 'episode' and 'see-pattern' in m.tags, 0)
+            if previous_match and next_match and separator.value in self.range_separators:
+                to_remove.append(next_match)
+
+                for episode_number in range(previous_match.value + 1, next_match.value + 1):
+                    match = copy.copy(next_match)
+                    match.value = episode_number
+                    to_append.append(match)
+
+            to_remove.append(separator)
+
+        return to_remove, to_append
+
+
 class AbstractSeparatorRange(Rule):
     """
     Remove separator matches and create matches for season range.
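An illustrative sketch (not part of the patch) of the new 'cap' chain together with SeePatternRange: 'Cap.1503_1506' is read as season 15 plus an episode range that the rule expands into one match per episode, matching the newpct fixtures added to episodes.yml further down in this diff:

    from guessit import guessit

    guess = guessit('Show.Name.-.Temporada.15.720p.HDTV.x264[Cap.1503_1506]SPANISH.AUDIO-NEWPCT')
    print(guess['season'])   # 15
    print(guess['episode'])  # [3, 4, 5, 6]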
@@ -351,10 +396,12 @@ def when(self, matches, context): next_match = matches.next(separator, lambda match: match.name == self.property_name, 0) if previous_match and next_match and separator.value in self.range_separators: + to_remove.append(next_match) for episode_number in range(previous_match.value + 1, next_match.value): match = copy.copy(next_match) match.value = episode_number to_append.append(match) + to_append.append(next_match) to_remove.append(separator) previous_match = None @@ -434,6 +481,71 @@ def when(self, matches, context): return to_remove +class RemoveInvalidSeason(Rule): + """ + Remove invalid season matches. + """ + priority = 64 + consequence = RemoveMatch + + def when(self, matches, context): + to_remove = [] + for filepart in matches.markers.named('path'): + strong_season = matches.range(filepart.start, filepart.end, index=0, + predicate=lambda m: m.name == 'season' + and not m.private and 'SxxExx' in m.tags) + if strong_season: + if strong_season.initiator.children.named('episode'): + for season in matches.range(strong_season.end, filepart.end, + predicate=lambda m: m.name == 'season' and not m.private): + # remove weak season or seasons without episode matches + if 'SxxExx' not in season.tags or not season.initiator.children.named('episode'): + if season.initiator: + to_remove.append(season.initiator) + to_remove.extend(season.initiator.children) + else: + to_remove.append(season) + + return to_remove + + +class RemoveInvalidEpisode(Rule): + """ + Remove invalid episode matches. + """ + priority = 64 + consequence = RemoveMatch + + def when(self, matches, context): + to_remove = [] + for filepart in matches.markers.named('path'): + strong_episode = matches.range(filepart.start, filepart.end, index=0, + predicate=lambda m: m.name == 'episode' + and not m.private and 'SxxExx' in m.tags) + if strong_episode: + strong_ep_marker = RemoveInvalidEpisode.get_episode_prefix(matches, strong_episode) + for episode in matches.range(strong_episode.end, filepart.end, + predicate=lambda m: m.name == 'episode' and not m.private): + ep_marker = RemoveInvalidEpisode.get_episode_prefix(matches, episode) + if strong_ep_marker and ep_marker and strong_ep_marker.value.lower() != ep_marker.value.lower(): + if episode.initiator: + to_remove.append(episode.initiator) + to_remove.extend(episode.initiator.children) + else: + to_remove.append(ep_marker) + to_remove.append(episode) + + return to_remove + + @staticmethod + def get_episode_prefix(matches, episode): + """ + Return episode prefix: episodeMarker or episodeSeparator + """ + return matches.previous(episode, index=0, + predicate=lambda m: m.name in ('episodeMarker', 'episodeSeparator')) + + class RemoveWeakDuplicate(Rule): """ Remove weak-duplicate tagged matches if duplicate patterns, for example The 100.109 diff --git a/lib/guessit/rules/properties/format.py b/lib/guessit/rules/properties/format.py index f9a2ecd540..83a9a2f65e 100644 --- a/lib/guessit/rules/properties/format.py +++ b/lib/guessit/rules/properties/format.py @@ -17,7 +17,7 @@ def format_(): :rtype: Rebulk """ rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE, abbreviations=[dash]) - rebulk.defaults(name="format", tags='video-codec-prefix') + rebulk.defaults(name="format", tags=['video-codec-prefix', 'streaming_service.suffix']) rebulk.regex("VHS", "VHS-?Rip", value="VHS") rebulk.regex("CAM", "CAM-?Rip", "HD-?CAM", value="Cam") @@ -26,18 +26,21 @@ def format_(): rebulk.regex("TELECINE", "TC", value="Telecine") rebulk.regex("PPV", "PPV-?Rip", value="PPV") # Pay Per 
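The RemoveInvalidSeason and RemoveInvalidEpisode rules above let a strong SxxExx match win over stray episode-looking tokens later in the name. A sketch (not part of the patch), with the expected values taken from the fixture added to episodes.yml below:

    from guessit import guessit

    # Only S02E06 is kept; 'eps2.4.m4ster-s1ave' stays in the episode title.
    guess = guessit('Show.Name.S02E06.eps2.4.m4ster-s1ave.aes.1080p.AMZN.WEBRip.DD5.1.x264-GROUP')
    print(guess['season'], guess['episode'])  # 2 6
    print(guess['episode_title'])             # eps2 4 m4ster-s1ave aes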
View rebulk.regex("SD-?TV", "SD-?TV-?Rip", "Rip-?SD-?TV", "TV-?Rip", - "Rip-?TV", value="TV") # TV is too common to allow matching + "Rip-?TV", "TV-?(?=Dub)", value="TV") # TV is too common to allow matching rebulk.regex("DVB-?Rip", "DVB", "PD-?TV", value="DVB") rebulk.regex("DVD", "DVD-?Rip", "VIDEO-?TS", "DVD-?R(?:$|(?!E))", # "DVD-?R(?:$|^E)" => DVD-Real ... "DVD-?9", "DVD-?5", value="DVD") - rebulk.regex("HD-?TV", "TV-?RIP-?HD", "HD-?TV-?RIP", "HD-?RIP", value="HDTV") + rebulk.regex("HD-?TV", "TV-?RIP-?HD", "HD-?TV-?RIP", "HD-?RIP", value="HDTV", + conflict_solver=lambda match, other: other if other.name == 'other' else '__default__') rebulk.regex("VOD", "VOD-?Rip", value="VOD") rebulk.regex("WEB-?Rip", "WEB-?DL-?Rip", "WEB-?Cap", value="WEBRip") rebulk.regex("WEB-?DL", "WEB-?HD", "WEB", "DL-?WEB", "DL(?=-?Mux)", value="WEB-DL") rebulk.regex("HD-?DVD-?Rip", "HD-?DVD", value="HD-DVD") rebulk.regex("Blu-?ray(?:-?Rip)?", "B[DR]", "B[DR]-?Rip", "BD[59]", "BD25", "BD50", value="BluRay") rebulk.regex("AHDTV", value="AHDTV") + rebulk.regex('UHD-?TV', 'UHD-?Rip', value='UHDTV', + conflict_solver=lambda match, other: other if other.name == 'other' else '__default__') rebulk.regex("HDTC", value="HDTC") rebulk.regex("DSR", "DSR?-?Rip", "SAT-?Rip", "DTH", "DTH-?Rip", value="SATRip") diff --git a/lib/guessit/rules/properties/language.py b/lib/guessit/rules/properties/language.py index 3476d60aa2..0c44cf0ed0 100644 --- a/lib/guessit/rules/properties/language.py +++ b/lib/guessit/rules/properties/language.py @@ -5,11 +5,12 @@ """ # pylint: disable=no-member import copy +from collections import defaultdict, namedtuple import babelfish - -from rebulk.remodule import re from rebulk import Rebulk, Rule, RemoveMatch, RenameMatch +from rebulk.remodule import re + from ..common.words import iter_words, COMMON_WORDS from ..common.validators import seps_surround @@ -23,9 +24,11 @@ def language(): rebulk = Rebulk() rebulk.string(*subtitle_prefixes, name="subtitle_language.prefix", ignore_case=True, private=True, - validator=seps_surround) + validator=seps_surround, tags=['release-group-prefix']) rebulk.string(*subtitle_suffixes, name="subtitle_language.suffix", ignore_case=True, private=True, validator=seps_surround) + rebulk.string(*lang_suffixes, name="language.suffix", ignore_case=True, private=True, + validator=seps_surround, tags=['format-suffix']) rebulk.functional(find_languages, properties={'language': [None]}) rebulk.rules(SubtitlePrefixLanguageRule, SubtitleSuffixLanguageRule, SubtitleExtensionRule) @@ -36,13 +39,13 @@ def language(): UNDETERMINED = babelfish.Language('und') -SYN = {('und', None): ['unknown', 'inconnu', 'unk', 'un'], +SYN = {('und', None): ['unknown', 'inconnu', 'unk'], ('ell', None): ['gr', 'greek'], - ('spa', None): ['esp', 'español'], + ('spa', None): ['esp', 'español', 'espanol'], ('fra', None): ['français', 'vf', 'vff', 'vfi', 'vfq'], ('swe', None): ['se'], - ('por', 'BR'): ['po', 'pb', 'pob', 'br', 'brazilian'], - ('cat', None): ['català'], + ('por', 'BR'): ['po', 'pb', 'pob', 'ptbr', 'br', 'brazilian'], + ('cat', None): ['català', 'castellano', 'espanol castellano', 'español castellano'], ('ces', None): ['cz'], ('ukr', None): ['ua'], ('zho', None): ['cn'], @@ -104,15 +107,247 @@ def reverse(self, name): raise babelfish.LanguageReverseError(name) +def length_comparator(value): + """ + Return value length. 
+ """ + return len(value) + + babelfish.language_converters['guessit'] = GuessitConverter() -subtitle_both = ['sub', 'subs', 'subbed', 'custom subbed', 'custom subs', 'custom sub', 'customsubbed', 'customsubs', - 'customsub'] -subtitle_prefixes = subtitle_both + ['st', 'vost', 'subforced', 'fansub', 'hardsub'] -subtitle_suffixes = subtitle_both + ['subforced', 'fansub', 'hardsub'] -lang_prefixes = ['true'] -all_lang_prefixes_suffixes = subtitle_prefixes + subtitle_suffixes + lang_prefixes +subtitle_both = ['sub', 'subs', 'subbed', 'custom subbed', 'custom subs', + 'custom sub', 'customsubbed', 'customsubs', 'customsub', + 'soft subtitles', 'soft subs'] +subtitle_prefixes = sorted(subtitle_both + + ['st', 'vost', 'subforced', 'fansub', 'hardsub', + 'legenda', 'legendas', 'legendado', 'subtitulado', + 'soft', 'subtitles'], key=length_comparator) +subtitle_suffixes = sorted(subtitle_both + + ['subforced', 'fansub', 'hardsub'], key=length_comparator) +lang_both = ['dublado', 'dubbed', 'dub'] +lang_suffixes = sorted(lang_both + ['audio'], key=length_comparator) +lang_prefixes = sorted(lang_both + ['true'], key=length_comparator) + +_LanguageMatch = namedtuple('_LanguageMatch', ['property_name', 'word', 'lang']) + + +class LanguageWord(object): + """ + Extension to the Word namedtuple in order to create compound words. + + E.g.: pt-BR, soft subtitles, custom subs + """ + + def __init__(self, start, end, value, input_string, next_word=None): + self.start = start + self.end = end + self.value = value + self.input_string = input_string + self.next_word = next_word + + @property + def extended_word(self): + """ + Return the extended word for this instance, if any. + """ + if self.next_word: + separator = self.input_string[self.end:self.next_word.start] + next_separator = self.input_string[self.next_word.end:self.next_word.end + 1] + + if (separator == '-' and separator != next_separator) or separator in (' ', '.'): + value = self.input_string[self.start:self.next_word.end].replace('.', ' ') + + return LanguageWord(self.start, self.next_word.end, value, self.input_string, self.next_word.next_word) + + def __repr__(self): + return '<({start},{end}): {value}'.format(start=self.start, end=self.end, value=self.value) + + +def to_rebulk_match(language_match): + """ + Convert language match to rebulk Match: start, end, dict + """ + word = language_match.word + start = word.start + end = word.end + name = language_match.property_name + if language_match.lang == UNDETERMINED: + return start, end, dict(name=name, value=word.value.lower(), + formatter=babelfish.Language, tags=['weak-language']) + + return start, end, dict(name=name, value=language_match.lang) + + +class LanguageFinder(object): + """ + Helper class to search and return language matches: 'language' and 'subtitle_language' properties + """ + + def __init__(self, allowed_languages): + self.parsed = dict() + self.allowed_languages = allowed_languages + self.common_words = COMMON_WORDS_STRICT if allowed_languages else COMMON_WORDS + + def find(self, string): + """ + Return all matches for language and subtitle_language. + + Undetermined language matches are removed if a regular language is found. 
+ Multi language matches are removed if there are only undetermined language matches + """ + regular_lang_map = defaultdict(set) + undetermined_map = defaultdict(set) + multi_map = defaultdict(set) + + for match in self.iter_language_matches(string): + key = match.property_name + if match.lang == UNDETERMINED: + undetermined_map[key].add(match) + elif match.lang == 'mul': + multi_map[key].add(match) + else: + regular_lang_map[key].add(match) + + for key, values in multi_map.items(): + if key in regular_lang_map or key not in undetermined_map: + for value in values: + yield to_rebulk_match(value) + + for key, values in undetermined_map.items(): + if key not in regular_lang_map: + for value in values: + yield to_rebulk_match(value) + + for values in regular_lang_map.values(): + for value in values: + yield to_rebulk_match(value) + + def iter_language_matches(self, string): + """ + Return language matches for the given string. + """ + candidates = [] + previous = None + for word in iter_words(string): + language_word = LanguageWord(start=word.span[0], end=word.span[1], value=word.value, input_string=string) + if previous: + previous.next_word = language_word + candidates.append(previous) + previous = language_word + if previous: + candidates.append(previous) + + for candidate in candidates: + for match in self.iter_matches_for_candidate(candidate): + yield match + + def iter_matches_for_candidate(self, language_word): + """ + Return language matches for the given candidate word. + """ + tuples = [ + (language_word, language_word.next_word, + dict(subtitle_language=subtitle_prefixes, language=lang_prefixes), + lambda string, prefix: string.startswith(prefix), + lambda string, prefix: string[len(prefix):]), + (language_word.next_word, language_word, + dict(subtitle_language=subtitle_suffixes, language=lang_suffixes), + lambda string, suffix: string.endswith(suffix), + lambda string, suffix: string[:len(string) - len(suffix)]) + ] + + for word, fallback_word, affixes, is_affix, strip_affix in tuples: + if not word: + continue + + match = self.find_match_for_word(word, fallback_word, affixes, is_affix, strip_affix) + if match: + yield match + + match = self.find_language_match_for_word(language_word) + if match: + yield match + + def find_match_for_word(self, word, fallback_word, affixes, is_affix, strip_affix): + """ + Return the language match for the given word and affixes. + """ + for current_word in (word.extended_word, word): + if not current_word: + continue + + word_lang = current_word.value.lower() + if word_lang in self.common_words: + continue + + for key, parts in affixes.items(): + for part in parts: + if not is_affix(word_lang, part): + continue + + match = None + value = strip_affix(word_lang, part) + if not value: + if fallback_word: + match = self.find_language_match_for_word(fallback_word, key=key, force=True) + + if not match: + match = self.create_language_match(key, LanguageWord(current_word.start, current_word.end, + 'und', current_word.input_string)) + elif value not in self.common_words: + match = self.create_language_match(key, LanguageWord(current_word.start, current_word.end, + value, current_word.input_string)) + + if match: + return match + + def find_language_match_for_word(self, word, key='language', force=False): + """ + Return the language match for the given word. 
+ """ + for current_word in (word.extended_word, word): + if current_word and (force or current_word.value.lower() not in self.common_words): + match = self.create_language_match(key, current_word) + if match: + return match + + def create_language_match(self, key, word): + """ + Create a LanguageMatch for a given word + """ + lang = self.parse_language(word.value.lower()) + + if lang is not None: + return _LanguageMatch(property_name=key, word=word, lang=lang) + + def parse_language(self, lang_word): + """ + Parse the lang_word into a valid Language. + + Multi and Undetermined languages are also valid languages. + """ + if lang_word in self.parsed: + return self.parsed[lang_word] + + try: + lang = babelfish.Language.fromguessit(lang_word) + if self.allowed_languages: + if (hasattr(lang, 'name') and lang.name.lower() in self.allowed_languages) \ + or (hasattr(lang, 'alpha2') and lang.alpha2.lower() in self.allowed_languages) \ + or lang.alpha3.lower() in self.allowed_languages: + self.parsed[lang_word] = lang + return lang + # Keep language with alpha2 equivalent. Others are probably + # uncommon languages. + elif lang in ('mul', UNDETERMINED) or hasattr(lang, 'alpha2'): + self.parsed[lang_word] = lang + return lang + + self.parsed[lang_word] = None + except babelfish.Error: + self.parsed[lang_word] = None def find_languages(string, context=None): @@ -120,43 +355,7 @@ def find_languages(string, context=None): :return: list of tuple (property, Language, lang_word, word) """ - allowed_languages = context.get('allowed_languages') - common_words = COMMON_WORDS_STRICT if allowed_languages else COMMON_WORDS - - matches = [] - for word_match in iter_words(string): - word = word_match.value - start, end = word_match.span - - lang_word = word.lower() - key = 'language' - for prefix in subtitle_prefixes: - if lang_word.startswith(prefix): - lang_word = lang_word[len(prefix):] - key = 'subtitle_language' - for suffix in subtitle_suffixes: - if lang_word.endswith(suffix): - lang_word = lang_word[:len(lang_word) - len(suffix)] - key = 'subtitle_language' - for prefix in lang_prefixes: - if lang_word.startswith(prefix): - lang_word = lang_word[len(prefix):] - if lang_word not in common_words and word.lower() not in common_words: - try: - lang = babelfish.Language.fromguessit(lang_word) - match = (start, end, {'name': key, 'value': lang}) - if allowed_languages: - if lang.name.lower() in allowed_languages \ - or lang.alpha2.lower() in allowed_languages \ - or lang.alpha3.lower() in allowed_languages: - matches.append(match) - # Keep language with alpha2 equivalent. Others are probably - # uncommon languages. - elif lang == 'mul' or hasattr(lang, 'alpha2'): - matches.append(match) - except babelfish.Error: - pass - return matches + return LanguageFinder(context.get('allowed_languages')).find(string) class SubtitlePrefixLanguageRule(Rule): @@ -184,6 +383,7 @@ def when(self, matches, context): lambda match: match.name == 'subtitle_language.prefix', 0) if prefix: to_rename.append((prefix, lang)) + to_remove.extend(matches.conflicting(lang)) if prefix in to_remove: to_remove.remove(prefix) return to_rename, to_remove @@ -233,9 +433,11 @@ def then(self, matches, when_response, context): class SubtitleExtensionRule(Rule): """ - Convert language guess as subtitle_language if next match is a subtitle extension + Convert language guess as subtitle_language if next match is a subtitle extension. + + Since it's a strong match, it also removes any conflicting format with it. 
""" - consequence = RenameMatch('subtitle_language') + consequence = [RemoveMatch, RenameMatch('subtitle_language')] properties = {'subtitle_language': [None]} @@ -246,4 +448,4 @@ def when(self, matches, context): if subtitle_extension: subtitle_lang = matches.previous(subtitle_extension, lambda match: match.name == 'language', 0) if subtitle_lang: - return subtitle_lang + return matches.conflicting(subtitle_lang, lambda m: m.name == 'format'), subtitle_lang diff --git a/lib/guessit/rules/properties/other.py b/lib/guessit/rules/properties/other.py index bd7620cbe5..792f4bb681 100644 --- a/lib/guessit/rules/properties/other.py +++ b/lib/guessit/rules/properties/other.py @@ -10,7 +10,7 @@ from ..common import dash from ..common import seps -from ..common.validators import seps_after, seps_surround, compose +from ..common.validators import seps_after, seps_before, seps_surround, compose from ...reutils import build_or_pattern from ...rules.common.formatters import raw_cleanup @@ -26,12 +26,15 @@ def other(): rebulk.regex('Audio-?Fix', 'Audio-?Fixed', value='AudioFix') rebulk.regex('Sync-?Fix', 'Sync-?Fixed', value='SyncFix') - rebulk.regex('Dual-?Audio', value='DualAudio') + rebulk.regex('Dual', 'Dual-?Audio', value='DualAudio') rebulk.regex('ws', 'wide-?screen', value='WideScreen') rebulk.regex('Re-?Enc(?:oded)?', value='ReEncoded') rebulk.string('Real', 'Fix', 'Fixed', value='Proper', tags=['has-neighbor-before', 'has-neighbor-after']) - rebulk.string('Proper', 'Repack', 'Rerip', value='Proper') + rebulk.string('Proper', 'Repack', 'Rerip', 'Dirfix', 'Nfofix', 'Prooffix', value='Proper', + tags=['streaming_service.prefix', 'streaming_service.suffix']) + rebulk.regex('(?:Proof-?)?Sample-?Fix', value='Proper', + tags=['streaming_service.prefix', 'streaming_service.suffix']) rebulk.string('Fansub', value='Fansub', tags='has-neighbor') rebulk.string('Fastsub', value='Fastsub', tags='has-neighbor') @@ -63,14 +66,32 @@ def validate_complete(match): rebulk.regex('(?:PS-?)?Vita', value='PS Vita') for value in ( - 'Screener', 'Remux', 'Remastered', '3D', 'HD', 'mHD', 'HDLight', 'HQ', 'DDC', 'HR', 'PAL', 'SECAM', 'NTSC', + 'Screener', 'Remux', 'Remastered', '3D', 'mHD', 'HDLight', 'HQ', 'DDC', 'HR', 'PAL', 'SECAM', 'NTSC', 'CC', 'LD', 'MD', 'XXX'): rebulk.string(value, value=value) + rebulk.string('LDTV', value='LD') + rebulk.string('HD', value='HD', validator=None, + tags=['streaming_service.prefix', 'streaming_service.suffix']) + rebulk.regex('Ultra-?(?:HD)?', 'UHD', value='UltraHD', validator=None, + tags=['streaming_service.prefix', 'streaming_service.suffix']) for value in ('Limited', 'Complete', 'Classic', 'Unrated', 'LiNE', 'Bonus', 'Trailer', 'FINAL', 'Retail', 'Uncut', - 'Extended', 'Extended Cut'): + 'Extended', 'Extended Cut', 'Colorized', 'Internal', 'Uncensored'): rebulk.string(value, value=value, tags=['has-neighbor', 'release-group-prefix']) + rebulk.regex('Extended-?version', value='Extended', tags=['has-neighbor', 'release-group-prefix']) + rebulk.regex('Alternat(e|ive)(?:-?Cut)?', value='Alternative Cut', tags=['has-neighbor', 'release-group-prefix']) + rebulk.regex('Read-?NFO', value='Read NFO') + rebulk.string('CONVERT', value='Converted', tags='has-neighbor') + rebulk.string('DOCU', value='Documentary', tags='has-neighbor') + rebulk.string('OM', value='Open Matte', tags='has-neighbor') + rebulk.string('STV', value='Straight to Video', tags='has-neighbor') + rebulk.string('OAR', value='Original Aspect Ratio', tags='has-neighbor') + rebulk.string('Festival', value='Festival', 
tags=['has-neighbor-before', 'has-neighbor-after']) + rebulk.string('Complet', value='Complete', tags=['has-neighbor', 'release-group-prefix']) + + for coast in ('East', 'West'): + rebulk.regex(r'(?:Live-)?(?:Episode-)?' + coast + '-?(?:Coast-)?Feed', value=coast + ' Coast Feed') rebulk.string('VO', 'OV', value='OV', tags='has-neighbor') @@ -78,9 +99,10 @@ def validate_complete(match): tags=['other.validate.screener', 'format-prefix', 'format-suffix']) rebulk.string('Mux', value='Mux', validator=seps_after, tags=['other.validate.mux', 'video-codec-prefix', 'format-suffix']) + rebulk.string('HC', value='Hardcoded Subtitles') rebulk.rules(ValidateHasNeighbor, ValidateHasNeighborAfter, ValidateHasNeighborBefore, ValidateScreenerRule, - ValidateMuxRule, ProperCountRule) + ValidateMuxRule, ValidateHardcodedSubs, ValidateStreamingServiceNeighbor, ProperCountRule) return rebulk @@ -200,3 +222,61 @@ def when(self, matches, context): if not format_match: ret.append(mux) return ret + + +class ValidateHardcodedSubs(Rule): + """Validate HC matches.""" + + priority = 32 + consequence = RemoveMatch + + def when(self, matches, context): + to_remove = [] + for hc_match in matches.named('other', predicate=lambda match: match.value == 'Hardcoded Subtitles'): + next_match = matches.next(hc_match, predicate=lambda match: match.name == 'subtitle_language', index=0) + if next_match and not matches.holes(hc_match.end, next_match.start, + predicate=lambda match: match.value.strip(seps)): + continue + + previous_match = matches.previous(hc_match, + predicate=lambda match: match.name == 'subtitle_language', index=0) + if previous_match and not matches.holes(previous_match.end, hc_match.start, + predicate=lambda match: match.value.strip(seps)): + continue + + to_remove.append(hc_match) + + return to_remove + + +class ValidateStreamingServiceNeighbor(Rule): + """Validate streaming service's neighbors.""" + + priority = 32 + consequence = RemoveMatch + + def when(self, matches, context): + to_remove = [] + for match in matches.named('other', + predicate=lambda m: ('streaming_service.prefix' in m.tags or + 'streaming_service.suffix' in m.tags)): + + if not seps_after(match): + if 'streaming_service.prefix' in match.tags: + next_match = matches.next(match, lambda m: m.name == 'streaming_service', 0) + if next_match and not matches.holes(match.end, next_match.start, + predicate=lambda m: m.value.strip(seps)): + continue + + to_remove.append(match) + + elif not seps_before(match): + if 'streaming_service.suffix' in match.tags: + previous_match = matches.previous(match, lambda m: m.name == 'streaming_service', 0) + if previous_match and not matches.holes(previous_match.end, match.start, + predicate=lambda m: m.value.strip(seps)): + continue + + to_remove.append(match) + + return to_remove diff --git a/lib/guessit/rules/properties/release_group.py b/lib/guessit/rules/properties/release_group.py index 660fe4c83f..ace3f0eb32 100644 --- a/lib/guessit/rules/properties/release_group.py +++ b/lib/guessit/rules/properties/release_group.py @@ -5,7 +5,7 @@ """ import copy -from rebulk import Rebulk, Rule, AppendMatch +from rebulk import Rebulk, Rule, AppendMatch, RemoveMatch from ..common import seps from ..common.expected import build_expected_function @@ -63,7 +63,7 @@ def clean_groupname(string): _scene_previous_names = ['video_codec', 'format', 'video_api', 'audio_codec', 'audio_profile', 'video_profile', 'audio_channels', 'screen_size', 'other', 'container', 'language', 'subtitle_language', - 'subtitle_language.suffix', 
'subtitle_language.prefix'] + 'subtitle_language.suffix', 'subtitle_language.prefix', 'language.suffix'] _scene_previous_tags = ['release-group-prefix'] @@ -155,12 +155,13 @@ class AnimeReleaseGroup(Rule): ...[ReleaseGroup] Something.mkv """ dependency = [SceneReleaseGroup, TitleFromPosition] - consequence = AppendMatch + consequence = [RemoveMatch, AppendMatch] properties = {'release_group': [None]} def when(self, matches, context): - ret = [] + to_remove = [] + to_append = [] # If a release_group is found before, ignore this kind of release_group rule. if matches.named('release_group'): @@ -173,19 +174,23 @@ def when(self, matches, context): for filepart in marker_sorted(matches.markers.named('path'), matches): # pylint:disable=bad-continuation - empty_group_marker = matches.markers \ - .range(filepart.start, filepart.end, lambda marker: (marker.name == 'group' - and not matches.range(marker.start, marker.end) - and marker.value.strip(seps) - and not int_coercable(marker.value.strip(seps))), - 0) - - if empty_group_marker: - group = copy.copy(empty_group_marker) + empty_group = matches.markers.range(filepart.start, + filepart.end, + lambda marker: (marker.name == 'group' + and not matches.range(marker.start, marker.end, + lambda m: + 'weak-language' not in m.tags) + and marker.value.strip(seps) + and not int_coercable(marker.value.strip(seps))), 0) + + if empty_group: + group = copy.copy(empty_group) group.marker = False group.raw_start += 1 group.raw_end -= 1 group.tags = ['anime'] group.name = 'release_group' - ret.append(group) - return ret + to_append.append(group) + to_remove.extend(matches.range(empty_group.start, empty_group.end, + lambda m: 'weak-language' in m.tags)) + return to_remove, to_append diff --git a/lib/guessit/rules/properties/screen_size.py b/lib/guessit/rules/properties/screen_size.py index 80d68c2962..b7732ab61c 100644 --- a/lib/guessit/rules/properties/screen_size.py +++ b/lib/guessit/rules/properties/screen_size.py @@ -7,7 +7,7 @@ from rebulk import Rebulk, Rule, RemoveMatch from ..common.validators import seps_surround -from ..common import dash +from ..common import dash, seps def screen_size(): @@ -29,7 +29,7 @@ def conflict_solver(match, other): return other return '__default__' - rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE) + rebulk = Rebulk().string_defaults(ignore_case=True).regex_defaults(flags=re.IGNORECASE) rebulk.defaults(name="screen_size", validator=seps_surround, conflict_solver=conflict_solver) rebulk.regex(r"(?:\d{3,}(?:x|\*))?360(?:i|p?x?)", value="360p") @@ -45,6 +45,7 @@ def conflict_solver(match, other): rebulk.regex(r"(?:\d{3,}(?:x|\*))?1080(?:p(?:50|60)?x?)", value="1080p") rebulk.regex(r"(?:\d{3,}(?:x|\*))?1080p?hd", value="1080p") rebulk.regex(r"(?:\d{3,}(?:x|\*))?2160(?:i|p?x?)", value="4K") + rebulk.string('4k', value='4K') _digits_re = re.compile(r'\d+') @@ -55,7 +56,7 @@ def conflict_solver(match, other): tags=['resolution'], conflict_solver=lambda match, other: '__default__' if other.name == 'screen_size' else other) - rebulk.rules(ScreenSizeOnlyOne) + rebulk.rules(ScreenSizeOnlyOne, RemoveScreenSizeConflicts) return rebulk @@ -75,3 +76,33 @@ def when(self, matches, context): to_remove.extend(screensize[1:]) return to_remove + + +class RemoveScreenSizeConflicts(Rule): + """ + Remove season and episode matches which conflicts with screen_size match. 
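The RemoveScreenSizeConflicts rule above resolves a bare '720' or '1080' that sits next to a video profile as a screen size rather than a season/episode pair. A sketch (not part of the patch); expected values come from the issue-308 fixtures added below:

    from guessit import guessit

    guess = guessit('[SuperGroup].Show.Name.-.06.[720.Hi10p][1F5578AC]')
    print(guess['screen_size'], guess['episode'])  # 720p 6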
+ """ + consequence = RemoveMatch + + def when(self, matches, context): + to_remove = [] + for filepart in matches.markers.named('path'): + screensize = matches.range(filepart.start, filepart.end, lambda match: match.name == 'screen_size', 0) + if not screensize: + continue + + conflicts = matches.conflicting(screensize, lambda match: match.name in ('season', 'episode')) + if not conflicts: + continue + + video_profile = matches.range(screensize.end, filepart.end, lambda match: match.name == 'video_profile', 0) + if video_profile and not matches.holes(screensize.end, video_profile.start, + predicate=lambda h: h.value and h.value.strip(seps)): + to_remove.extend(conflicts) + + date = matches.previous(screensize, lambda match: match.name == 'date', 0) + if date and not matches.holes(date.end, screensize.start, + predicate=lambda h: h.value and h.value.strip(seps)): + to_remove.extend(conflicts) + + return to_remove diff --git a/lib/guessit/rules/properties/streaming_service.py b/lib/guessit/rules/properties/streaming_service.py index 1e4810808a..b31690d339 100644 --- a/lib/guessit/rules/properties/streaming_service.py +++ b/lib/guessit/rules/properties/streaming_service.py @@ -9,7 +9,6 @@ from rebulk.rules import Rule, RemoveMatch from ...rules.common import seps, dash -from ...rules.common.validators import seps_surround def streaming_service(): @@ -19,7 +18,7 @@ def streaming_service(): :rtype: Rebulk """ rebulk = Rebulk().string_defaults(ignore_case=True).regex_defaults(flags=re.IGNORECASE, abbreviations=[dash]) - rebulk.defaults(name='streaming_service', validator=seps_surround) + rebulk.defaults(name='streaming_service', tags=['format-prefix']) rebulk.string('AE', 'A&E', value='A&E') rebulk.string('AMBC', value='ABC') @@ -55,6 +54,7 @@ def streaming_service(): rebulk.string('NFL', value='NFL') rebulk.string('NICK', 'Nickelodeon', value='Nickelodeon') rebulk.string('NF', 'Netflix', value='Netflix') + rebulk.string('iTunes', value='iTunes') rebulk.string('RTE', value='RTÉ One') rebulk.string('SESO', 'SeeSo', value='SeeSo') rebulk.string('SPKE', 'SpikeTV', 'Spike TV', value='Spike TV') @@ -89,14 +89,20 @@ def when(self, matches, context): """ to_remove = [] for service in matches.named('streaming_service'): - next_match = matches.next(service, predicate=lambda match: match.name == 'format', index=0) - if next_match and not matches.holes(service.end, next_match.start, - predicate=lambda match: match.value.strip(seps)): - if service.value == 'Comedy Central': - # Current match is a valid streaming service, removing invalid closed caption (CC) matches - to_remove.extend(matches.named('other', predicate=lambda match: match.value == 'CC')) + next_match = matches.next(service, lambda match: 'streaming_service.suffix' in match.tags, 0) + previous_match = matches.previous(service, lambda match: 'streaming_service.prefix' in match.tags, 0) + has_other = service.initiator and service.initiator.children.named('other') + + if not has_other and \ + (not next_match or matches.holes(service.end, next_match.start, + predicate=lambda match: match.value.strip(seps))) and \ + (not previous_match or matches.holes(previous_match.end, service.start, + predicate=lambda match: match.value.strip(seps))): + to_remove.append(service) continue - to_remove.append(service) + if service.value == 'Comedy Central': + # Current match is a valid streaming service, removing invalid closed caption (CC) matches + to_remove.extend(matches.named('other', predicate=lambda match: match.value == 'CC')) return to_remove diff --git 
a/lib/guessit/rules/properties/title.py b/lib/guessit/rules/properties/title.py index e282d1e196..e87ceb6dea 100644 --- a/lib/guessit/rules/properties/title.py +++ b/lib/guessit/rules/properties/title.py @@ -142,7 +142,7 @@ def should_remove(self, match, matches, filepart, hole, context): :return: """ if context.get('type') == 'episode' and match.name == 'episode_details': - return False + return match.start >= hole.start and match.end <= hole.end return True def check_titles_in_filepart(self, filepart, matches, context): diff --git a/lib/guessit/rules/properties/video_codec.py b/lib/guessit/rules/properties/video_codec.py index e4961677e9..86661469cc 100644 --- a/lib/guessit/rules/properties/video_codec.py +++ b/lib/guessit/rules/properties/video_codec.py @@ -18,7 +18,7 @@ def video_codec(): :rtype: Rebulk """ rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE, abbreviations=[dash]).string_defaults(ignore_case=True) - rebulk.defaults(name="video_codec", tags='format-suffix') + rebulk.defaults(name="video_codec", tags=['format-suffix', 'streaming_service.suffix']) rebulk.regex(r"Rv\d{2}", value="Real") rebulk.regex("Mpeg2", value="Mpeg2") @@ -26,12 +26,14 @@ def video_codec(): rebulk.regex("XviD", value="XviD") rebulk.regex("[hx]-?264(?:-?AVC(HD)?)?", "MPEG-?4(?:-?AVC(HD)?)", "AVC(?:HD)?", value="h264") rebulk.regex("[hx]-?265(?:-?HEVC)?", "HEVC", value="h265") + rebulk.regex('(?Phevc)(?P10)', value={'video_codec': 'h265', 'video_profile': '10bit'}, + tags=['video-codec-suffix'], children=True) # http://blog.mediacoderhq.com/h264-profiles-and-levels/ # http://fr.wikipedia.org/wiki/H.264 rebulk.defaults(name="video_profile", validator=seps_surround) - rebulk.regex('10.?bits?', 'Hi10P', value='10bit') + rebulk.regex('10.?bits?', 'Hi10P?', 'YUV420P10', value='10bit') rebulk.regex('8.?bits?', value='8bit') rebulk.string('BP', value='BP', tags='video_profile.rule') @@ -62,7 +64,8 @@ def when(self, matches, context): not matches.at_index(codec.start - 1, lambda match: 'video-codec-prefix' in match.tags): ret.append(codec) continue - if not seps_after(codec): + if not seps_after(codec) and \ + not matches.at_index(codec.end + 1, lambda match: 'video-codec-suffix' in match.tags): ret.append(codec) continue return ret diff --git a/lib/guessit/test/episodes.yml b/lib/guessit/test/episodes.yml index 5961815975..5d97f760be 100644 --- a/lib/guessit/test/episodes.yml +++ b/lib/guessit/test/episodes.yml @@ -1775,7 +1775,8 @@ : episode: 11 episode_title: Our Man in Damascus format: HDTV - release_group: iNTERNAL-BaCKToRG + other: Internal + release_group: BaCKToRG screen_size: 720p season: 5 subtitle_language: de @@ -2902,6 +2903,53 @@ release_group: CasStudio type: episode +? Proof.2015.S01E10.1080p.WEB-DL.DD5.1.H.264-KINGS.mkv +: title: Proof + season: 1 + episode: 10 + screen_size: 1080p + format: WEB-DL + audio_codec: DolbyDigital + audio_channels: '5.1' + video_codec: h264 + release_group: KINGS + container: mkv + type: episode + +# Hardcoded subtitles +? Show.Name.S06E16.HC.SWESUB.HDTV.x264 +: title: Show Name + season: 6 + episode: 16 + other: Hardcoded Subtitles + format: HDTV + video_codec: h264 + subtitle_language: sv + type: episode + +? 
From [ WWW.TORRENTING.COM ] - White.Rabbit.Project.S01E08.1080p.NF.WEBRip.DD5.1.x264-ViSUM/White.Rabbit.Project.S01E08.1080p.NF.WEBRip.DD5.1.x264-ViSUM.mkv +: title: White Rabbit Project + website: WWW.TORRENTING.COM + season: 1 + episode: 8 + screen_size: 1080p + streaming_service: Netflix + format: WEBRip + audio_codec: DolbyDigital + audio_channels: '5.1' + video_codec: h264 + release_group: ViSUM + container: mkv + type: episode + +? /tv/Daniel Tiger's Neighborhood/S02E06 - Playtime Is Different.mp4 +: season: 2 + episode: 6 + title: Daniel Tiger's Neighborhood + episode_title: Playtime Is Different + container: mp4 + type: episode + ? Zoo.S02E05.1080p.WEB-DL.DD5.1.H.264.HKD/160725_02.mkv : title: Zoo season: 2 @@ -2957,19 +3005,6 @@ container: mkv type: episode -? Proof.2015.S01E10.1080p.WEB-DL.DD5.1.H.264-KINGS.mkv -: title: Proof - season: 1 - episode: 10 - screen_size: 1080p - format: WEB-DL - audio_codec: DolbyDigital - audio_channels: '5.1' - video_codec: h264 - release_group: KINGS - container: mkv - type: episode - ? The.Messengers.2015.S01E07.1080p.WEB-DL.DD5.1.H264.Nlsubs-Q/QoQ-sbuSLN.462.H.1.5DD.LD-BEW.p0801.70E10S.5102.sregnesseM.ehT.mkv : title: The Messengers year: 2015 @@ -3010,26 +3045,812 @@ release_group: Het.Robot.Team.OYM type: episode -? /tv/Daniel Tiger's Neighborhood/S02E06 - Playtime Is Different.mp4 -: season: 2 - episode: 6 - title: Daniel Tiger's Neighborhood - episode_title: Playtime Is Different - container: mp4 +? Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT +? /Show Name/Season 01/Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT +? /Show Name/Temporada 01/Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 1 + episode: 2 + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT type: episode -? From [ WWW.TORRENTING.COM ] - White.Rabbit.Project.S01E08.1080p.NF.WEBRip.DD5.1.x264-ViSUM/White.Rabbit.Project.S01E08.1080p.NF.WEBRip.DD5.1.x264-ViSUM.mkv -: title: White Rabbit Project - website: WWW.TORRENTING.COM - season: 1 +# newpct +? Show Name - Temporada 4 [HDTV][Cap.408][Espanol Castellano] +? Show Name - Temporada 4 [HDTV][Cap.408][Español Castellano] +: title: Show Name + season: 4 episode: 8 + format: HDTV + language: ca + type: episode + +# newpct +? -Show Name - Temporada 4 [HDTV][Cap.408][Espanol Castellano] +? -Show Name - Temporada 4 [HDTV][Cap.408][Español Castellano] +: release_group: Castellano + +# newpct +? Show.Name.-.Temporada1.[HDTV][Cap.105][Español.Castellano] +: title: Show Name + format: HDTV + season: 1 + episode: 5 + language: ca + type: episode + +# newpct +? Show.Name.-.Temporada1.[HDTV][Cap.105][Español] +: title: Show Name + format: HDTV + season: 1 + episode: 5 + language: es + type: episode + +# newpct - season and episode with range: +? Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102_104]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 1 + episode: [2, 3, 4] + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT + type: episode + +# newpct - season and episode (2 digit season) +? Show.Name.-.Temporada.15.720p.HDTV.x264[Cap.1503]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 15 + episode: 3 + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT + type: episode + +# newpct - season and episode (2 digit season with range) +? 
Show.Name.-.Temporada.15.720p.HDTV.x264[Cap.1503_1506]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 15 + episode: [3, 4, 5, 6] + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT + type: episode + +# newpct - season and episode: +? Show.Name.-.Temp.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 1 + episode: 2 + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT + type: episode + +# newpct - season and episode: +? Show.Name.-.Tem.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 1 + episode: 2 + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT + type: episode + +# newpct - season and episode: +? Show.Name.-.Tem.1.720p.HDTV.x264[Cap.112_114.Final]SPANISH.AUDIO-NEWPCT +: title: Show Name + season: 1 + episode: [12, 13, 14] + screen_size: 720p + format: HDTV + video_codec: h264 + language: es + release_group: NEWPCT + other: FINAL + type: episode + +? Mastercook Italia - Stagione 6 (2016) 720p ep13 spyro.mkv +: title: Mastercook Italia + season: 6 + episode: 13 + year: 2016 + screen_size: 720p + episode_title: spyro + container: mkv + type: episode + +? Mastercook Italia - Stagione 6 (2016) 720p Episodio 13 spyro.mkv +: title: Mastercook Italia + season: 6 + year: 2016 + screen_size: 720p + episode: 13 + episode_title: spyro + container: mkv + type: episode + +# Italian releases +? Show Name 3x18 Un Tuffo Nel Passato ITA HDTVMux x264 NovaRip +: title: Show Name + season: 3 + episode: 18 + episode_title: Un Tuffo Nel Passato + language: it + format: HDTV + other: Mux + video_codec: h264 + release_group: NovaRip + type: episode + +# Italian releases +? Show Name 3x18 Un Tuffo Nel Passato ITA HDTVMux x264 NovaRip +: options: --allowed-languages it + title: Show Name + season: 3 + episode: 18 + episode_title: Un Tuffo Nel Passato + language: it + format: HDTV + other: Mux + video_codec: h264 + release_group: NovaRip + type: episode + +# Subbed: No language hint +? Show.Name.S06E03.1080p.HDTV.Legendado +: subtitle_language: und + +# Subbed: No language hint +? Show.Name.S01E09.Subbed.1080p.BluRay.x264-RRH +: title: Show Name + season: 1 + episode: 9 + subtitle_language: und + screen_size: 1080p + format: BluRay + video_codec: h264 + release_group: RRH + type: episode + +# Legendado PT-BR +? Show.Name.S06E05.1080p.WEBRip.Legendado.PT-BR +? Show.Name.S06E05.1080p.WEBRip.Legendas.PT-BR +? Show.Name.S06E05.1080p.WEBRip.Legenda.PT-BR +: title: Show Name + season: 6 + episode: 5 screen_size: 1080p - streaming_service: Netflix format: WEBRip - audio_codec: DolbyDigital + subtitle_language: pt-BR + type: episode + +? Show.Name.S01E07.Super, Title.WEB-DL 720p.br.srt +: title: Show Name + season: 1 + episode: 7 + episode_title: Super, Title + format: WEB-DL + screen_size: 720p + subtitle_language: pt-BR + container: srt + type: episode + +? -Show.Name.S01E07.Super, Title.WEB-DL 720p.br.srt +: language: pt-BR + +# Legendado PT +? Show.Name.S06E05.1080p.WEBRip.Legendado.PT +: title: Show Name + season: 6 + episode: 5 + screen_size: 1080p + format: WEBRip + subtitle_language: pt + type: episode + +? Show.Name.S05E01.SPANISH.SUBBED.720p.HDTV.x264-sPHD +: title: Show Name + season: 5 + episode: 1 + subtitle_language: spa + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: sPHD + type: episode + +? 
Show.Name.S01E01.German.Subbed.HDTV.XviD-ASAP +: title: Show Name + season: 1 + episode: 1 + subtitle_language: deu + format: HDTV + video_codec: XviD + release_group: ASAP + type: episode + +? Show.Name.S04E21.Aint.Nothing.Like.the.Real.Thing.German.Custom.Subbed.720p.HDTV.x264.iNTERNAL-BaCKToRG +: title: Show Name + season: 4 + episode: 21 + episode_title: Aint Nothing Like the Real Thing + subtitle_language: deu + screen_size: 720p + format: HDTV + video_codec: h264 + type: episode + +? Show.Name.S01.Season.Complet.WEBRiP.Ro.Subbed.TM +: title: Show Name + season: 1 + other: Complete + format: WEBRip + subtitle_language: ro + type: episode + +? Show.Name.(2013).Season.3.-.Eng.Soft.Subtitles.720p.WEBRip.x264.[MKV,AC3,5.1].Ehhhh +: title: Show Name + year: 2013 + season: 3 + subtitle_language: en + screen_size: 720p + format: WEBRip + video_codec: h264 + container: MKV + audio_codec: AC3 audio_channels: '5.1' + release_group: Ehhhh + type: episode + +# Dublado +? Show.Name.S02E03.720p.HDTV.x264-Belex.-.Dual.Audio.-.Dublado +: title: Show Name + season: 2 + episode: 3 + screen_size: 720p + format: HDTV video_codec: h264 - release_group: ViSUM - container: mkv + release_group: Belex + other: DualAudio + language: und + type: episode + +? Show.Name.S06E10.1080p.WEB-DL.DUAL.[Dublado].RK +: title: Show Name + season: 6 + episode: 10 + screen_size: 1080p + format: WEB-DL + other: DualAudio + language: und + release_group: RK type: episode +? Show.Name.S06E12.720p.WEB-DL.Dual.Audio.Dublado +: title: Show Name + season: 6 + episode: 12 + screen_size: 720p + format: WEB-DL + other: DualAudio + language: und + type: episode + +? Show.Name.S05E07.720p.DUBLADO.HDTV.x264-0SEC-pia.mkv +: title: Show Name + season: 5 + episode: 7 + screen_size: 720p + language: und + format: HDTV + video_codec: h264 + release_group: 0SEC-pia + container: mkv + type: episode + +? Show.Name.S02E07.Shiva.AC3.Dubbed.WEBRip.x264 +: title: Show Name + season: 2 + episode: 7 + episode_title: Shiva + audio_codec: AC3 + language: und + format: WEBRip + video_codec: h264 + type: episode + +# Legendas +? Show.Name.S05.1080p.BluRay.x264-Belex.-.Dual.Audio.+.Legendas +: title: Show Name + season: 5 + screen_size: 1080p + format: BluRay + video_codec: h264 + release_group: Belex + other: DualAudio + subtitle_language: und + type: episode + +# Legendas +? Show.Name.S05.1080p.BluRay.x264-Belex.-.Dual.Audio.+.Legendas +: options: --allowed-languages und + title: Show Name + season: 5 + screen_size: 1080p + format: BluRay + video_codec: h264 + release_group: Belex + other: DualAudio + subtitle_language: und + type: episode + +# Subtitulado +? Show.Name.S01E03.HDTV.Subtitulado.Esp.SC +? Show.Name.S01E03.HDTV.Subtitulado.Espanol.SC +? Show.Name.S01E03.HDTV.Subtitulado.Español.SC +# SC is a release group, not a language. To be addressed in #296 +: options: --allowed-languages spa --allowed-countries ESP + title: Show Name + season: 1 + episode: 3 + format: HDTV + subtitle_language: es + release_group: SC + type: episode + +# Subtitles/Subbed +? Show.Name.S02E08.720p.WEB-DL.Subtitles +? Show.Name.S02E08.Subbed.720p.WEB-DL +: title: Show Name + season: 2 + episode: 8 + screen_size: 720p + format: WEB-DL + subtitle_language: und + type: episode + +# Dubbed +? Show.Name.s01e01.german.Dubbed +: title: Show Name + season: 1 + episode: 1 + language: de + type: episode + +? Show.Name.S06E05.Das.Toor.German.AC3.Dubbed.HDTV.German +: title: Show Name + season: 6 + episode: 5 + language: de + audio_codec: AC3 + format: HDTV + type: episode + +? 
Show.Name.S01E01.Savage.Season.GERMAN.DUBBED.WS.HDTVRip.x264-TVP +: title: Show Name + season: 1 + episode: 1 + episode_title: Savage Season + language: de + other: WideScreen + format: HDTV + video_codec: h264 + release_group: TVP + type: episode + +# Dubbed +? "[AnimeRG].Show.Name.-.03.[Eng.Dubbed].[720p].[WEB-DL].[JRR]" +: title: Show Name + episode: 3 + language: en + screen_size: 720p + format: WEB-DL + release_group: JRR + type: episode + +# Dubbed +? "[RH].Show.Name.-.03.[English.Dubbed].[1080p]" +: title: Show Name + episode: 3 + language: en + screen_size: 1080p + release_group: RH + type: episode + +# Hebsubs +? Show.Name.S05E05.HDTV.XviD-AFG.HebSubs +: title: Show Name + season: 5 + episode: 5 + format: HDTV + video_codec: XviD + release_group: AFG + subtitle_language: he + type: episode + +? Show Name - S02E31 - Episode 55 (720p.HDTV) +: title: Show Name + season: 2 + episode: 31 + episode_title: Episode 55 + screen_size: 720p + format: HDTV + type: episode + +# Scenario: Removing invalid season and episode matches. Correct episode_title match +? Show.Name.S02E06.eps2.4.m4ster-s1ave.aes.1080p.AMZN.WEBRip.DD5.1.x264-GROUP +: title: Show Name + season: 2 + episode: 6 + episode_title: eps2 4 m4ster-s1ave aes + screen_size: 1080p + streaming_service: Amazon Prime + format: WEBRip + audio_codec: DolbyDigital + audio_channels: '5.1' + video_codec: h264 + release_group: GROUP + type: episode + +? Show.Name.S01E05.3xpl0its.wmv.720p.WEBdl.EN-SUB.x264-[MULVAcoded].mkv +: title: Show Name + season: 1 + episode: 5 + episode_title: 3xpl0its + screen_size: 720p + format: WEB-DL + subtitle_language: en + video_codec: h264 + type: episode + +# Regression: S4L release group detected as season 4 +# https://github.com/guessit-io/guessit/issues/352 +? Show Name S01E06 DVD-RIP x264-S4L +: title: Show Name + season: 1 + episode: 6 + format: DVD + video_codec: h264 + release_group: S4L + type: episode + +# Corner case with only date and 720p +? The.Show.Name.2016.05.18.720.HDTV.x264-GROUP.VTV +: title: The Show Name + date: 2016-05-18 + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: GROUP.VTV + type: episode + +# Corner case with only date and 720p +? -The.Show.Name.2016.05.18.720.HDTV.x264-GROUP.VTV +: season: 7 + episode: 20 + +# https://github.com/guessit-io/guessit/issues/308 (conflict with screen size) +? "[SuperGroup].Show.Name.-.06.[720.Hi10p][1F5578AC]" +: title: Show Name + episode: 6 + screen_size: 720p + video_profile: 10bit + crc32: 1F5578AC + release_group: SuperGroup + type: episode + +# https://github.com/guessit-io/guessit/issues/308 (conflict with screen size) +? "[SuperGroup].Show.Name.-.06.[1080.Hi10p][1F5578AC]" +: title: Show Name + episode: 6 + screen_size: 1080p + video_profile: 10bit + crc32: 1F5578AC + release_group: SuperGroup + type: episode + +? "[MK-Pn8].Dimension.W.-.05.[720p][Hi10][Dual][TV-Dub][EDA6E7F1]" +: options: -C us -L und + release_group: MK-Pn8 + title: Dimension W + episode: 5 + screen_size: 720p + video_profile: 10bit + other: DualAudio + format: TV + language: und + crc32: EDA6E7F1 + type: episode + +# NetflixUHD +? Show.Name.S01E06.NetflixUHD +: title: Show Name + season: 1 + episode: 6 + streaming_service: Netflix + other: UltraHD + type: episode + +? 
Show.Name.S04E13.FINAL.MULTI.DD51.2160p.NetflixUHDRip.x265-TVS +: title: Show Name + season: 4 + episode: 13 + other: FINAL + language: mul + audio_codec: DolbyDigital + audio_channels: '5.1' + screen_size: 4K + streaming_service: Netflix + format: UHDTV + video_codec: h265 + release_group: TVS + type: episode + +? Show.Name.S06E11.Of.Late.I.Think.of.Rosewood.iTunesHD.x264 +: title: Show Name + season: 6 + episode: 11 + episode_title: Of Late I Think of Rosewood + streaming_service: iTunes + other: HD + video_codec: h264 + type: episode + +? Show.Name.S01.720p.iTunes.h264-Group +: title: Show Name + season: 1 + screen_size: 720p + streaming_service: iTunes + video_codec: h264 + release_group: Group + type: episode + +? Show.Name.1x01.eps1.0.hellofriend.(HDiTunes.Ac3.Esp).(2015).By.Malaguita.avi +: title: Show Name + season: 1 + episode: 1 + episode_title: eps1 0 hellofriend + other: HD + streaming_service: iTunes + audio_codec: AC3 + language: spa + year: 2015 + container: avi + type: episode + +? "[Hanamaru&LoliHouse] The Dragon Dentist - 01 [WebRip 1920x1080 HEVC-yuv420p10 AAC].mkv" +: release_group: Hanamaru&LoliHouse + title: The Dragon Dentist + episode: 1 + format: WEBRip + screen_size: 1080p + video_codec: h265 + video_profile: 10bit + audio_codec: AAC + container: mkv + type: episode + +? Show Name - Season 1 Episode 50 +: title: Show Name + season: 1 + episode: 50 + type: episode + +? Vikings.Seizoen.4.1080p.Web.NLsubs +: title: Vikings + season: 4 + screen_size: 1080p + format: WEB-DL + subtitle_language: nl + type: episode + +? Star.Wars.Rebels.S01E01.Spark.of.Rebellion.ALTERNATE.CUT.HDTV.x264-W4F.mp4 +: title: Star Wars Rebels + season: 1 + episode: 1 + episode_title: Spark of Rebellion + other: Alternative Cut + format: HDTV + video_codec: h264 + release_group: W4F + container: mp4 + type: episode + +? DCs.Legends.of.Tomorrow.S02E12.HDTV.XviD-FUM +: title: DCs Legends of Tomorrow + season: 2 + episode: 12 + format: HDTV + video_codec: XviD + release_group: FUM + type: episode + +? DC's Legends of Tomorrow 2016 - S02E02 +: title: DC's Legends of Tomorrow + year: 2016 + season: 2 + episode: 2 + type: episode + +? Broadchurch.S01.DIRFIX.720p.BluRay.x264-SHORTBREHD +: title: Broadchurch + season: 1 + other: Proper + screen_size: 720p + format: BluRay + video_codec: h264 + release_group: SHORTBREHD + proper_count: 1 + type: episode + +? Simply Red - 2016-07-08 Montreux Jazz Festival 720p +: title: Simply Red + date: 2016-07-08 + episode_title: Montreux Jazz Festival + screen_size: 720p + type: episode + +? Ridiculousness.S07E14.iNTERNAL.HDTV.x264-YesTV +: title: Ridiculousness + season: 7 + episode: 14 + other: Internal + format: HDTV + video_codec: h264 + release_group: YesTV + type: episode + +? Stephen.Colbert.2016.05.25.James.McAvoy.iNTERNAL.XviD-AFG +: title: Stephen Colbert + date: 2016-05-25 + episode_title: James McAvoy + other: Internal + video_codec: XviD + release_group: AFG + type: episode + +? The.100.S01E13.iNTERNAL.READNFO.720p.HDTV.x264-2HD +: title: The 100 + season: 1 + episode: 13 + other: [Internal, Read NFO] + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: 2HD + type: episode + +? The.100.S01E13.READ.NFO.720p.HDTV.x264-2HD +: title: The 100 + season: 1 + episode: 13 + other: Read NFO + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: 2HD + type: episode + +? 
Dr.Ken.S01E21.SAMPLEFIX.720p.HDTV.x264-SVA +: title: Dr Ken + season: 1 + episode: 21 + other: Proper + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: SVA + type: episode + +? Rick and Morty Season 1 [UNCENSORED] [BDRip] [1080p] [HEVC] +: title: Rick and Morty + season: 1 + other: Uncensored + format: BluRay + screen_size: 1080p + video_codec: h265 + type: episode + +? 12.Monkeys.S01E01.LiMiTED.FRENCH.1080p.WEB-DL.H264-AUTHORiTY +: title: 12 Monkeys + season: 1 + episode: 1 + edition: Limited Edition + language: french + screen_size: 1080p + format: WEB-DL + video_codec: h264 + release_group: AUTHORiTY + type: episode + +? Undateable.2014.S03E05.West.Feed.HDTV.x264-2HD +: title: Undateable + year: 2014 + season: 3 + episode: 5 + other: West Coast Feed + format: HDTV + video_codec: h264 + release_group: 2HD + type: episode + +? Undateable.2014.S02E07-E08.Live.Episode.West.Coast.Feed.HDTV.x264-2HD +: title: Undateable + year: 2014 + season: 2 + episode: [7, 8] + other: West Coast Feed + format: HDTV + video_codec: h264 + release_group: 2HD + type: episode + +? Undateable.S03E01-E02.LIVE.EAST.FEED.720p.HDTV.x264-KILLERS +: title: Undateable + season: 3 + episode: [1, 2] + other: East Coast Feed + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: KILLERS + type: episode + +? Undateable.2014.S02E07.Live.Episode.East.Coast.Feed.HDTV.x264-2HD +: title: Undateable + year: 2014 + season: 2 + episode: 7 + other: East Coast Feed + format: HDTV + video_codec: h264 + release_group: 2HD + type: episode + +? Undateable.2014.S02E07.East.Coast.Feed.720p.WEB-DL.DD5.1.H.264-NTb +: title: Undateable + year: 2014 + season: 2 + episode: 7 + other: East Coast Feed + screen_size: 720p + format: WEB-DL + audio_codec: DolbyDigital + audio_channels: '5.1' + video_codec: h264 + release_group: NTb + type: episode + +? Show Name Super 015 VOSTFR par Fansub-Resistance (1280x720) - HQ version +: options: --type episode --episode-prefer-number + title: Show Name Super + episode: 15 + subtitle_language: fr + screen_size: 720p + release_group: Resistance + other: HQ + type: episode + +? Show Name 445 VOSTFR par Fansub-Resistance (1280*720) - version MQ +: options: --type episode --episode-prefer-number + title: Show Name + episode: 445 + screen_size: 720p + subtitle_language: fr + release_group: Resistance + type: episode diff --git a/lib/guessit/test/movies.yml b/lib/guessit/test/movies.yml index 9d8f973cfc..316a1062ff 100644 --- a/lib/guessit/test/movies.yml +++ b/lib/guessit/test/movies.yml @@ -458,28 +458,28 @@ - French - English format: DVD - other: NTSC + other: [Straight to Video, Read NFO, NTSC] ? Immersion.French.2011.STV.READNFO.QC.FRENCH.NTSC.DVDR.nfo : title: Immersion French year: 2011 language: French format: DVD - other: NTSC + other: [Straight to Video, Read NFO, NTSC] ? Immersion.French.2011.STV.READNFO.QC.NTSC.DVDR.nfo : title: Immersion language: French year: 2011 format: DVD - other: NTSC + other: [Straight to Video, Read NFO, NTSC] ? French.Immersion.2011.STV.READNFO.QC.ENGLISH.NTSC.DVDR.nfo : title: French Immersion year: 2011 language: ENGLISH format: DVD - other: NTSC + other: [Straight to Video, Read NFO, NTSC] ? Howl's_Moving_Castle_(2004)_[720p,HDTV,x264,DTS]-FlexGet.avi : video_codec: h264 @@ -855,4 +855,230 @@ ? Mad Max Beyond Thunderdome () : title: Mad Max Beyond Thunderdome - type: movie \ No newline at end of file + type: movie + +? 
Hacksaw Ridge 2016 Multi 2160p UHD BluRay Hevc10 HDR10 DTSHD & ATMOS 7.1 -DDR.mkv +: title: Hacksaw Ridge + year: 2016 + language: mul + screen_size: 4K + other: UltraHD + format: BluRay + video_codec: h265 + video_profile: 10bit + audio_codec: [DTS, DolbyAtmos] + audio_profile: HD + audio_channels: '7.1' + release_group: DDR + container: mkv + type: movie + +? Special.Correspondents.2016.iTA.ENG.4K.2160p.NetflixUHD.TeamPremium.mp4 +: title: Special Correspondents + year: 2016 + language: [it, en] + screen_size: 4K + streaming_service: Netflix + other: UltraHD + release_group: TeamPremium + container: mp4 + type: movie + +? -Special.Correspondents.2016.iTA.ENG.4K.2160p.NetflixUHD.TeamPremium.mp4 +: alternative_title: 4K + +? -Special.Correspondents.2016.iTA.ENG.4K.2160p.NetflixUHD.TeamPremium.mp4 +: alternative_title: 2160p + +? Suicide Squad EXTENDED (2016) 2160p 4K UltraHD Blu-Ray x265 (HEVC 10bit BT709) Dolby Atmos 7.1 -DDR +: title: Suicide Squad + other: [Extended, UltraHD] + year: 2016 + screen_size: 4K + format: BluRay + video_codec: h265 + video_profile: 10bit + audio_codec: DolbyAtmos + audio_channels: '7.1' + release_group: DDR + type: movie + +? Queen - A Kind of Magic (Alternative Extended Version) 2CD 2014 +: title: Queen + alternative_title: A Kind of Magic + other: [Alternative Cut, Extended] + cd_count: 2 + year: 2014 + type: movie + +? Jour.de.Fete.1949.ALTERNATiVE.CUT.1080p.BluRay.x264-SADPANDA[rarbg] +: title: Jour de Fete + year: 1949 + other: Alternative Cut + screen_size: 1080p + format: BluRay + video_codec: h264 + release_group: SADPANDA[rarbg] + +? The.Movie.CONVERT.720p.HDTV.x264-C4TV +: title: The Movie + other: Converted + screen_size: 720p + format: HDTV + video_codec: h264 + release_group: C4TV + type: movie + +? Its.A.Wonderful.Life.1946.Colorized.720p.BRRip.999MB.MkvCage.com +: title: Its A Wonderful Life + year: 1946 + other: Colorized + screen_size: 720p + format: BluRay + size: 999MB + website: MkvCage.com + type: movie + +? Alien DC (1979) [1080p] +: title: Alien + edition: Director's cut + year: 1979 + screen_size: 1080p + type: movie + +? Requiem.For.A.Dream.2000.DC.1080p.BluRay.x264.anoXmous +: title: Requiem For A Dream + year: 2000 + edition: Director's cut + screen_size: 1080p + format: BluRay + video_codec: h264 + release_group: anoXmous + type: movie + +? Before.the.Flood.2016.DOCU.1080p.WEBRip.x264.DD5.1-FGT +: title: Before the Flood + year: 2016 + other: Documentary + screen_size: 1080p + format: WEBRip + video_codec: h264 + audio_codec: DolbyDigital + audio_channels: '5.1' + release_group: FGT + type: movie + +? Zootopia.2016.HDRip.1.46Gb.Dub.MegaPeer +: title: Zootopia + year: 2016 + format: HDTV + size: 1.46GB + language: und + release_group: MegaPeer + type: movie + +? Suntan.2016.FESTiVAL.DVDRip.x264-IcHoR +: title: Suntan + year: 2016 + other: Festival + format: DVD + video_codec: h264 + release_group: IcHoR + type: movie + +? Hardwired.STV.NFOFiX.FRENCH.DVDRiP.XviD-SURViVAL +: title: Hardwired + other: [Straight to Video, Proper] + language: french + format: DVD + video_codec: XviD + release_group: SURViVAL + proper_count: 1 + type: movie + +? Maze.Runner.The.Scorch.Trials.OM.2015.WEB-DLRip.by.Seven +: title: Maze Runner The Scorch Trials + other: Open Matte + year: 2015 + format: WEBRip + release_group: Seven + type: movie + +? 
Kampen Om Tungtvannet aka The Heavy Water War COMPLETE 720p x265 HEVC-Lund +: title: Kampen Om Tungtvannet aka The Heavy Water War + other: Complete + screen_size: 720p + video_codec: h265 + release_group: Lund + type: movie + +? All.Fall.Down.x264.PROOFFIX-OUTLAWS +: title: All Fall Down + video_codec: h264 + other: Proper + release_group: OUTLAWS + proper_count: 1 + type: movie + +? The.Last.Survivors.2014.PROOF.SAMPLE.FiX.BDRip.x264-TOPCAT +: title: The Last Survivors + year: 2014 + other: Proper + format: BluRay + video_codec: h264 + release_group: TOPCAT + type: movie + +? Bad Santa 2 2016 THEATRiCAL FRENCH BDRip XviD-EXTREME +: title: Bad Santa 2 + year: 2016 + edition: Theatrical Edition + language: french + format: BluRay + video_codec: XviD + release_group: EXTREME + type: movie + +? The Lord of the Rings The Fellowship of the Ring THEATRICAL EDITION (2001) [1080p] +: title: The Lord of the Rings The Fellowship of the Ring + edition: Theatrical Edition + year: 2001 + screen_size: 1080p + type: movie + +? World War Z (2013) Theatrical Cut 720p BluRay x264 +: title: World War Z + year: 2013 + edition: Theatrical Edition + screen_size: 720p + format: BluRay + video_codec: h264 + type: movie + +? The Heartbreak Kid (1993) UNCUT 720p WEBRip x264 +: title: The Heartbreak Kid + year: 1993 + other: Uncut + screen_size: 720p + format: WEBRip + video_codec: h264 + type: movie + +? Mrs.Doubtfire.1993.720p.OAR.Bluray.DTS.x264-CtrlHD +: title: Mrs Doubtfire + year: 1993 + screen_size: 720p + other: Original Aspect Ratio + format: BluRay + audio_codec: DTS + video_codec: h264 + release_group: CtrlHD + type: movie + +? Aliens.SE.1986.BDRip.1080p +: title: Aliens + edition: Special Edition + year: 1986 + format: BluRay + screen_size: 1080p + type: movie diff --git a/lib/guessit/test/rules/audio_codec.yml b/lib/guessit/test/rules/audio_codec.yml index 22a6d0e728..1d942053c0 100644 --- a/lib/guessit/test/rules/audio_codec.yml +++ b/lib/guessit/test/rules/audio_codec.yml @@ -36,6 +36,12 @@ ? +trueHD : audio_codec: TrueHD +? +True-HD51 +? +trueHD51 +: audio_codec: TrueHD + audio_channels: '5.1' + + ? +DTS-HD : audio_codec: DTS audio_profile: HD @@ -57,6 +63,7 @@ audio_profile: LC ? +AAC2.0 +? +AAC20 : audio_codec: AAC audio_channels: '2.0' @@ -80,5 +87,9 @@ : audio_channels: '1.0' ? DD5.1 +? DD51 : audio_codec: DolbyDigital audio_channels: '5.1' + +? -51 +: audio_channels: '5.1' diff --git a/lib/guessit/test/rules/episodes.yml b/lib/guessit/test/rules/episodes.yml index a75e67029f..29e59c6c7f 100644 --- a/lib/guessit/test/rules/episodes.yml +++ b/lib/guessit/test/rules/episodes.yml @@ -116,10 +116,15 @@ ? -A very special movie : episode_details: Special -? A very special episode +? -A very special episode : options: -t episode episode_details: Special +? A very special episode s06 special +: options: -t episode + title: A very special episode + episode_details: Special + ? 12 Monkeys\Season 01\Episode 05\12 Monkeys - S01E05 - The Night Room.mkv : container: mkv title: 12 Monkeys @@ -244,4 +249,24 @@ ? epi : options: -t episode - title: epi \ No newline at end of file + title: epi + +? Episode20 +? Episode 20 +: episode: 20 + +? Episode50 +? Episode 50 +: episode: 50 + +? Episode51 +? Episode 51 +: episode: 51 + +? Episode70 +? Episode 70 +: episode: 70 + +? Episode71 +? 
Episode 71 +: episode: 71 \ No newline at end of file diff --git a/lib/guessit/test/rules/format.yml b/lib/guessit/test/rules/format.yml index 3deefe8ab3..e983cfb1b9 100644 --- a/lib/guessit/test/rules/format.yml +++ b/lib/guessit/test/rules/format.yml @@ -132,3 +132,7 @@ ? HDTC : format: HDTC + +? UHDTV +? UHDRip +: format: UHDTV diff --git a/lib/guessit/test/rules/other.yml b/lib/guessit/test/rules/other.yml index 7f0e8c075a..1d90f62095 100644 --- a/lib/guessit/test/rules/other.yml +++ b/lib/guessit/test/rules/other.yml @@ -87,6 +87,12 @@ ? HD : other: HD +? UHD +? Ultra +? UltraHD +? Ultra HD +: other: UltraHD + ? mHD # ?? : other: mHD @@ -138,3 +144,12 @@ ? re-encoded ? reencoded : other: ReEncoded + +? Super Movie Alternate XViD +? Super Movie Alternative XViD +? Super Movie Alternate Cut XViD +? Super Movie Alternative Cut XViD +: other: Alternative Cut + +? CONVERT XViD +: other: Converted \ No newline at end of file diff --git a/lib/guessit/test/rules/video_codec.yml b/lib/guessit/test/rules/video_codec.yml index d195eaafe9..a11991ecc7 100644 --- a/lib/guessit/test/rules/video_codec.yml +++ b/lib/guessit/test/rules/video_codec.yml @@ -49,6 +49,11 @@ ? -x264 : video_codec: h265 +? hevc10 +? HEVC-YUV420P10 +: video_codec: h265 + video_profile: 10bit + ? h265-HP : video_codec: h265 video_profile: HP \ No newline at end of file diff --git a/lib/imdbpie/imdbpie.py b/lib/imdbpie/imdbpie.py index 20811c9afe..e8dde06a75 100644 --- a/lib/imdbpie/imdbpie.py +++ b/lib/imdbpie/imdbpie.py @@ -59,7 +59,6 @@ def get_person_by_id(self, imdb_id): def get_title_by_id(self, imdb_id): url = self._build_url('/title/maindetails', {'tconst': imdb_id}) response = self._get(url) - print(response) if response is None or self._is_redirection_result(response): return None diff --git a/lib/imdbpie/objects.py b/lib/imdbpie/objects.py index 6b98b3f30c..f965a5cbdf 100644 --- a/lib/imdbpie/objects.py +++ b/lib/imdbpie/objects.py @@ -60,7 +60,6 @@ def __unicode__(self): class Title(object): def __init__(self, data): - print(data) self.imdb_id = data.get('tconst') self.title = data.get('title') self.type = data.get('type') diff --git a/lib/rarfile.py b/lib/rarfile.py index 25b61196a6..78148c1916 100644 --- a/lib/rarfile.py +++ b/lib/rarfile.py @@ -54,74 +54,127 @@ # Set to full path of unrar.exe if it is not in PATH rarfile.UNRAR_TOOL = "unrar" - # Set to 0 if you don't look at comments and want to - # avoid wasting time for parsing them - rarfile.NEED_COMMENTS = 1 - - # Set up to 1 if you don't want to deal with decoding comments - # from unknown encoding. rarfile will try couple of common - # encodings in sequence. - rarfile.UNICODE_COMMENTS = 0 - - # Set to 1 if you prefer timestamps to be datetime objects - # instead tuples - rarfile.USE_DATETIME = 0 - - # Set to '/' to be more compatible with zipfile - rarfile.PATH_SEP = '\\' + # Set to '\\' to be more compatible with old rarfile + rarfile.PATH_SEP = '/' For more details, refer to source. 
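    A minimal end-to-end sketch of the settings above (the archive and
    member names here are placeholders)::

        import rarfile

        # only needed when the unrar binary is not on PATH
        rarfile.UNRAR_TOOL = "unrar"

        with rarfile.RarFile("archive.rar") as rf:
            for info in rf.infolist():
                print(info.filename, info.file_size)
            # member names use '/' since PATH_SEP now defaults to '/'
            data = rf.read("docs/readme.txt")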
""" -__version__ = '2.8' - -# export only interesting items -__all__ = ['is_rarfile', 'RarInfo', 'RarFile', 'RarExtFile'] +from __future__ import division, print_function ## ## Imports and compat - support both Python 2.x and 3.x ## -import sys, os, struct, errno +import sys +import os +import errno +import struct + from struct import pack, unpack, Struct -from binascii import crc32 +from binascii import crc32, hexlify from tempfile import mkstemp from subprocess import Popen, PIPE, STDOUT -from datetime import datetime from io import RawIOBase -from hashlib import sha1 +from hashlib import sha1, sha256 +from hmac import HMAC +from datetime import datetime, timedelta, tzinfo + +# fixed offset timezone, for UTC +try: + from datetime import timezone +except ImportError: + class timezone(tzinfo): + """Compat timezone.""" + __slots__ = ('_ofs', '_name') + _DST = timedelta(0) + + def __init__(self, offset, name): + super(timezone, self).__init__() + self._ofs, self._name = offset, name + + def utcoffset(self, dt): + return self._ofs + + def tzname(self, dt): + return self._name + + def dst(self, dt): + return self._DST # only needed for encryped headers try: try: from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.kdf import pbkdf2 + class AES_CBC_Decrypt(object): - block_size = 16 + """Decrypt API""" def __init__(self, key, iv): ciph = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend()) - self.dec = ciph.decryptor() - def decrypt(self, data): - return self.dec.update(data) + self.decrypt = ciph.decryptor().update + + def pbkdf2_sha256(password, salt, iters): + """PBKDF2 with HMAC-SHA256""" + ctx = pbkdf2.PBKDF2HMAC(hashes.SHA256(), 32, salt, iters, default_backend()) + return ctx.derive(password) + except ImportError: from Crypto.Cipher import AES + from Crypto.Protocol import KDF + class AES_CBC_Decrypt(object): - block_size = 16 + """Decrypt API""" def __init__(self, key, iv): - self.dec = AES.new(key, AES.MODE_CBC, iv) - def decrypt(self, data): - return self.dec.decrypt(data) + self.decrypt = AES.new(key, AES.MODE_CBC, iv).decrypt + + def pbkdf2_sha256(password, salt, iters): + """PBKDF2 with HMAC-SHA256""" + return KDF.PBKDF2(password, salt, 32, iters, hmac_sha256) + _have_crypto = 1 except ImportError: _have_crypto = 0 +try: + try: + from hashlib import blake2s + _have_blake2 = True + except ImportError: + from pyblake2 import blake2s + _have_blake2 = True +except ImportError: + _have_blake2 = False + # compat with 2.x if sys.hexversion < 0x3000000: - # prefer 3.x behaviour - range = xrange -else: + def rar_crc32(data, prev=0): + """CRC32 with unsigned values. + """ + if (prev > 0) and (prev & 0x80000000): + prev -= (1 << 32) + res = crc32(data, prev) + if res < 0: + res += (1 << 32) + return res + tohex = hexlify + _byte_code = ord +else: # pragma: no cover + def tohex(data): + """Return hex string.""" + return hexlify(data).decode('ascii') + rar_crc32 = crc32 unicode = str + _byte_code = int # noqa + + +__version__ = '3.0' + +# export only interesting items +__all__ = ['is_rarfile', 'RarInfo', 'RarFile', 'RarExtFile'] ## ## Module configuration. Can be tuned after importing. @@ -166,36 +219,27 @@ def decrypt(self, data): USE_EXTRACT_HACK = 1 #: limit the filesize for tmp archive usage -HACK_SIZE_LIMIT = 20*1024*1024 - -#: whether to parse file/archive comments. 
-NEED_COMMENTS = 1 - -#: whether to convert comments to unicode strings -UNICODE_COMMENTS = 0 - -#: Convert RAR time tuple into datetime() object -USE_DATETIME = 0 +HACK_SIZE_LIMIT = 20 * 1024 * 1024 #: Separator for path name components. RAR internally uses '\\'. #: Use '/' to be similar with zipfile. -PATH_SEP = '\\' +PATH_SEP = '/' ## ## rar constants ## # block types -RAR_BLOCK_MARK = 0x72 # r -RAR_BLOCK_MAIN = 0x73 # s -RAR_BLOCK_FILE = 0x74 # t -RAR_BLOCK_OLD_COMMENT = 0x75 # u -RAR_BLOCK_OLD_EXTRA = 0x76 # v -RAR_BLOCK_OLD_SUB = 0x77 # w -RAR_BLOCK_OLD_RECOVERY = 0x78 # x -RAR_BLOCK_OLD_AUTH = 0x79 # y -RAR_BLOCK_SUB = 0x7a # z -RAR_BLOCK_ENDARC = 0x7b # { +RAR_BLOCK_MARK = 0x72 # r +RAR_BLOCK_MAIN = 0x73 # s +RAR_BLOCK_FILE = 0x74 # t +RAR_BLOCK_OLD_COMMENT = 0x75 # u +RAR_BLOCK_OLD_EXTRA = 0x76 # v +RAR_BLOCK_OLD_SUB = 0x77 # w +RAR_BLOCK_OLD_RECOVERY = 0x78 # x +RAR_BLOCK_OLD_AUTH = 0x79 # y +RAR_BLOCK_SUB = 0x7a # z +RAR_BLOCK_ENDARC = 0x7b # { # flags for RAR_BLOCK_MAIN RAR_MAIN_VOLUME = 0x0001 @@ -257,196 +301,335 @@ def decrypt(self, data): RAR_M4 = 0x34 RAR_M5 = 0x35 +# +# RAR5 constants +# + +RAR5_BLOCK_MAIN = 1 +RAR5_BLOCK_FILE = 2 +RAR5_BLOCK_SERVICE = 3 +RAR5_BLOCK_ENCRYPTION = 4 +RAR5_BLOCK_ENDARC = 5 + +RAR5_BLOCK_FLAG_EXTRA_DATA = 0x01 +RAR5_BLOCK_FLAG_DATA_AREA = 0x02 +RAR5_BLOCK_FLAG_SKIP_IF_UNKNOWN = 0x04 +RAR5_BLOCK_FLAG_SPLIT_BEFORE = 0x08 +RAR5_BLOCK_FLAG_SPLIT_AFTER = 0x10 +RAR5_BLOCK_FLAG_DEPENDS_PREV = 0x20 +RAR5_BLOCK_FLAG_KEEP_WITH_PARENT = 0x40 + +RAR5_MAIN_FLAG_ISVOL = 0x01 +RAR5_MAIN_FLAG_HAS_VOLNR = 0x02 +RAR5_MAIN_FLAG_SOLID = 0x04 +RAR5_MAIN_FLAG_RECOVERY = 0x08 +RAR5_MAIN_FLAG_LOCKED = 0x10 + +RAR5_FILE_FLAG_ISDIR = 0x01 +RAR5_FILE_FLAG_HAS_MTIME = 0x02 +RAR5_FILE_FLAG_HAS_CRC32 = 0x04 +RAR5_FILE_FLAG_UNKNOWN_SIZE = 0x08 + +RAR5_COMPR_SOLID = 0x40 + +RAR5_ENC_FLAG_HAS_CHECKVAL = 0x01 + +RAR5_ENDARC_FLAG_NEXT_VOL = 0x01 + +RAR5_XFILE_ENCRYPTION = 1 +RAR5_XFILE_HASH = 2 +RAR5_XFILE_TIME = 3 +RAR5_XFILE_VERSION = 4 +RAR5_XFILE_REDIR = 5 +RAR5_XFILE_OWNER = 6 +RAR5_XFILE_SERVICE = 7 + +RAR5_XTIME_UNIXTIME = 0x01 +RAR5_XTIME_HAS_MTIME = 0x02 +RAR5_XTIME_HAS_CTIME = 0x04 +RAR5_XTIME_HAS_ATIME = 0x08 + +RAR5_XENC_CIPHER_AES256 = 0 + +RAR5_XENC_CHECKVAL = 0x01 +RAR5_XENC_TWEAKED = 0x02 + +RAR5_XHASH_BLAKE2SP = 0 + +RAR5_XREDIR_UNIX_SYMLINK = 1 +RAR5_XREDIR_WINDOWS_SYMLINK = 2 +RAR5_XREDIR_WINDOWS_JUNCTION = 3 +RAR5_XREDIR_HARD_LINK = 4 +RAR5_XREDIR_FILE_COPY = 5 + +RAR5_XREDIR_ISDIR = 0x01 + +RAR5_XOWNER_UNAME = 0x01 +RAR5_XOWNER_GNAME = 0x02 +RAR5_XOWNER_UID = 0x04 +RAR5_XOWNER_GID = 0x08 + +RAR5_OS_WINDOWS = 0 +RAR5_OS_UNIX = 1 + ## ## internal constants ## RAR_ID = b"Rar!\x1a\x07\x00" -ZERO = b"\0" -EMPTY = b"" - -S_BLK_HDR = Struct(' 0 + class Error(Exception): """Base class for rarfile errors.""" + class BadRarFile(Error): """Incorrect data in archive.""" + class NotRarFile(Error): """The file is not RAR archive.""" + class BadRarName(Error): """Cannot guess multipart name components.""" + class NoRarEntry(Error): """File not found in RAR""" + class PasswordRequired(Error): """File requires password""" + class NeedFirstVolume(Error): """Need to start from first volume.""" + class NoCrypto(Error): """Cannot parse encrypted headers - no crypto available.""" + class RarExecError(Error): """Problem reported by unrar/rar.""" + class RarWarning(RarExecError): """Non-fatal error""" + class RarFatalError(RarExecError): """Fatal error""" + class RarCRCError(RarExecError): """CRC error during unpacking""" + class RarLockedArchiveError(RarExecError): """Must not 
modify locked archive""" + class RarWriteError(RarExecError): """Write error""" + class RarOpenError(RarExecError): """Open error""" + class RarUserError(RarExecError): """User error""" + class RarMemoryError(RarExecError): """Memory error""" + class RarCreateError(RarExecError): """Create error""" + class RarNoFilesError(RarExecError): """No files that match pattern were found""" + class RarUserBreak(RarExecError): """User stop""" + +class RarWrongPassword(RarExecError): + """Incorrect password""" + class RarUnknownError(RarExecError): """Unknown exit code""" + class RarSignalExit(RarExecError): """Unrar exited with signal""" + class RarCannotExec(RarExecError): """Executable not found.""" -def is_rarfile(xfile): - '''Check quickly whether file is rar archive.''' - fd = XFile(xfile) - buf = fd.read(len(RAR_ID)) - fd.close() - return buf == RAR_ID +class RarInfo(object): + r"""An entry in rar archive. + RAR3 extended timestamps are :class:`datetime.datetime` objects without timezone. + RAR5 extended timestamps are :class:`datetime.datetime` objects with UTC timezone. -class RarInfo(object): - r'''An entry in rar archive. + Attributes: - :mod:`zipfile`-compatible fields: - filename File name with relative path. - Default path separator is '\\', to change set rarfile.PATH_SEP. - Always unicode string. + Path separator is '/'. Always unicode string. + date_time - Modification time, tuple of (year, month, day, hour, minute, second). - Or datetime() object if USE_DATETIME is set. + File modification timestamp. As tuple of (year, month, day, hour, minute, second). + RAR5 allows archives where it is missing, it's None then. + file_size Uncompressed size. + compress_size Compressed size. - CRC - CRC-32 of uncompressed file, unsigned int. - comment - File comment. Byte string or None. Use UNICODE_COMMENTS - to get automatic decoding to unicode. - volume - Volume nr, starting from 0. - - RAR-specific fields: compress_type - Compression method: 0x30 - 0x35. + Compression method: one of :data:`RAR_M0` .. :data:`RAR_M5` constants. + extract_version - Minimal Rar version needed for decompressing. + Minimal Rar version needed for decompressing. As (major*10 + minor), + so 2.9 is 29. + + RAR3: 10, 20, 29 + + RAR5 does not have such field in archive, it's simply set to 50. + host_os Host OS type, one of RAR_OS_* constants. + + RAR3: :data:`RAR_OS_WIN32`, :data:`RAR_OS_UNIX`, :data:`RAR_OS_MSDOS`, + :data:`RAR_OS_OS2`, :data:`RAR_OS_BEOS`. + + RAR5: :data:`RAR_OS_WIN32`, :data:`RAR_OS_UNIX`. + mode File attributes. May be either dos-style or unix-style, depending on host_os. - volume_file - Volume file name, where file starts. + mtime - Optional time field: Modification time, with float seconds. - Same as .date_time but with more precision. + File modification time. Same value as :attr:`date_time` + but as :class:`datetime.datetime` object with extended precision. + ctime - Optional time field: creation time, with float seconds. + Optional time field: creation time. As :class:`datetime.datetime` object. + atime - Optional time field: last access time, with float seconds. + Optional time field: last access time. As :class:`datetime.datetime` object. + arctime - Optional time field: archival time, with float seconds. - - Internal fields: - - type - One of RAR_BLOCK_* types. Only entries with type==RAR_BLOCK_FILE are shown in .infolist(). - flags - For files, RAR_FILE_* bits. 
- ''' - - __slots__ = ( - # zipfile-compatible fields - 'filename', - 'file_size', - 'compress_size', - 'date_time', - 'comment', - 'CRC', - 'volume', - 'orig_filename', # bytes in unknown encoding - - # rar-specific fields - 'extract_version', - 'compress_type', - 'host_os', - 'mode', - 'type', - 'flags', - - # optional extended time fields - # tuple where the sec is float, or datetime(). - 'mtime', # same as .date_time - 'ctime', - 'atime', - 'arctime', - - # RAR internals - 'name_size', - 'header_size', - 'header_crc', - 'file_offset', - 'add_size', - 'header_data', - 'header_base', - 'header_offset', - 'salt', - 'volume_file', - ) + Optional time field: archival time. As :class:`datetime.datetime` object. + (RAR3-only) + + CRC + CRC-32 of uncompressed file, unsigned int. + + RAR5: may be None. + + blake2sp_hash + Blake2SP hash over decompressed data. (RAR5-only) + + comment + Optional file comment field. Unicode string. (RAR3-only) + + file_redir + If not None, file is link of some sort. Contains tuple of (type, flags, target). + (RAR5-only) + + Type is one of constants: + + :data:`RAR5_XREDIR_UNIX_SYMLINK` + unix symlink to target. + :data:`RAR5_XREDIR_WINDOWS_SYMLINK` + windows symlink to target. + :data:`RAR5_XREDIR_WINDOWS_JUNCTION` + windows junction. + :data:`RAR5_XREDIR_HARD_LINK` + hard link to target. + :data:`RAR5_XREDIR_FILE_COPY` + current file is copy of another archive entry. + + Flags may contain :data:`RAR5_XREDIR_ISDIR` bit. + + volume + Volume nr, starting from 0. + + volume_file + Volume file name, where file starts. + + """ + + # zipfile-compatible fields + filename = None + file_size = None + compress_size = None + date_time = None + comment = None + CRC = None + volume = None + orig_filename = None + + # optional extended time fields, datetime() objects. + mtime = None + ctime = None + atime = None + + extract_version = None + mode = None + host_os = None + compress_type = None + + # rar3-only fields + comment = None + arctime = None + + # rar5-only fields + blake2sp_hash = None + file_redir = None + + # internal fields + flags = 0 + type = None def isdir(self): - '''Returns True if the entry is a directory.''' + """Returns True if entry is a directory. + """ if self.type == RAR_BLOCK_FILE: return (self.flags & RAR_FILE_DIRECTORY) == RAR_FILE_DIRECTORY return False def needs_password(self): - return (self.flags & RAR_FILE_PASSWORD) > 0 + """Returns True if data is stored password-protected. + """ + if self.type == RAR_BLOCK_FILE: + return (self.flags & RAR_FILE_PASSWORD) > 0 + return False class RarFile(object): - '''Parse RAR structure, provide access to files in archive. - ''' + """Parse RAR structure, provide access to files in archive. + """ - #: Archive comment. Byte string or None. Use :data:`UNICODE_COMMENTS` - #: to get automatic decoding to unicode. + #: Archive comment. Unicode string or None. comment = None def __init__(self, rarfile, mode="r", charset=None, info_callback=None, - crc_check = True, errors = "stop"): + crc_check=True, errors="stop"): """Open and parse a RAR archive. - + Parameters: rarfile @@ -463,18 +646,12 @@ def __init__(self, rarfile, mode="r", charset=None, info_callback=None, Either "stop" to quietly stop parsing on errors, or "strict" to raise errors. Default is "stop". 
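        A short sketch of typical construction and inspection; the archive
        name is a placeholder, and errors="stop" (the default) records
        parse problems instead of raising them::

            rf = RarFile("archive.part1.rar", crc_check=True, errors="stop")
            if rf.strerror() is not None:
                print("parse problem:", rf.strerror())
            for info in rf.infolist():
                if not info.isdir():
                    print(info.filename, info.needs_password())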
""" - self.rarfile = rarfile - self.comment = None + self._rarfile = rarfile self._charset = charset or DEFAULT_CHARSET self._info_callback = info_callback - - self._info_list = [] - self._info_map = {} - self._parse_error = None - self._needs_password = False - self._password = None self._crc_check = crc_check - self._vol_list = [] + self._password = None + self._file_parser = None if errors == "stop": self._strict = False @@ -483,69 +660,62 @@ def __init__(self, rarfile, mode="r", charset=None, info_callback=None, else: raise ValueError("Invalid value for 'errors' parameter.") - self._main = None - if mode != "r": raise NotImplementedError("RarFile supports only mode=r") self._parse() def __enter__(self): + """Open context.""" return self - def __exit__(self, type, value, traceback): + def __exit__(self, typ, value, traceback): + """Exit context""" self.close() def setpassword(self, password): - '''Sets the password to use when extracting.''' + """Sets the password to use when extracting. + """ self._password = password - if not self._main: + if self._file_parser: + if self._file_parser.has_header_encryption(): + self._file_parser = None + if not self._file_parser: self._parse() + else: + self._file_parser.setpassword(self._password) def needs_password(self): - '''Returns True if any archive entries require password for extraction.''' - return self._needs_password + """Returns True if any archive entries require password for extraction. + """ + return self._file_parser.needs_password() def namelist(self): - '''Return list of filenames in archive.''' + """Return list of filenames in archive. + """ return [f.filename for f in self.infolist()] def infolist(self): - '''Return RarInfo objects for all files/directories in archive.''' - return self._info_list + """Return RarInfo objects for all files/directories in archive. + """ + return self._file_parser.infolist() def volumelist(self): - '''Returns filenames of archive volumes. + """Returns filenames of archive volumes. In case of single-volume archive, the list contains just the name of main archive file. - ''' - return self._vol_list + """ + return self._file_parser.volumelist() def getinfo(self, fname): - '''Return RarInfo for file.''' - - if isinstance(fname, RarInfo): - return fname - - # accept both ways here - if PATH_SEP == '/': - fname2 = fname.replace("\\", "/") - else: - fname2 = fname.replace("/", "\\") + """Return RarInfo for file. + """ + return self._file_parser.getinfo(fname) - try: - return self._info_map[fname] - except KeyError: - try: - return self._info_map[fname2] - except KeyError: - raise NoRarEntry("No such file: "+fname) + def open(self, fname, mode='r', psw=None): + """Returns file-like object (:class:`RarExtFile`) from where the data can be read. - def open(self, fname, mode = 'r', psw = None): - '''Returns file-like object (:class:`RarExtFile`), - from where the data can be read. - The object implements :class:`io.RawIOBase` interface, so it can be further wrapped with :class:`io.BufferedReader` and :class:`io.TextIOWrapper`. @@ -565,7 +735,7 @@ def open(self, fname, mode = 'r', psw = None): must be 'r' psw password to use for extracting. 
- ''' + """ if mode != 'r': raise NotImplementedError("RarFile.open() supports only mode=r") @@ -575,9 +745,6 @@ def open(self, fname, mode = 'r', psw = None): if inf.isdir(): raise TypeError("Directory does not have any data: " + inf.filename) - if inf.flags & RAR_FILE_SPLIT_BEFORE: - raise NeedFirstVolume("Partial file, please start from first volume: " + inf.filename) - # check password if inf.needs_password(): psw = psw or self._password @@ -586,34 +753,11 @@ def open(self, fname, mode = 'r', psw = None): else: psw = None - # is temp write usable? - use_hack = 1 - if not self._main: - use_hack = 0 - elif self._main.flags & (RAR_MAIN_SOLID | RAR_MAIN_PASSWORD): - use_hack = 0 - elif inf.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): - use_hack = 0 - elif is_filelike(self.rarfile): - pass - elif inf.file_size > HACK_SIZE_LIMIT: - use_hack = 0 - elif not USE_EXTRACT_HACK: - use_hack = 0 - - # now extract - if inf.compress_type == RAR_M0 and (inf.flags & RAR_FILE_PASSWORD) == 0: - return self._open_clear(inf) - elif use_hack: - return self._open_hack(inf, psw) - elif is_filelike(self.rarfile): - return self._open_unrar_membuf(self.rarfile, inf, psw) - else: - return self._open_unrar(self.rarfile, inf, psw) + return self._file_parser.open(inf, psw) - def read(self, fname, psw = None): + def read(self, fname, psw=None): """Return uncompressed data for archive entry. - + For longer files using :meth:`RarFile.open` may be better idea. Parameters: @@ -624,11 +768,8 @@ def read(self, fname, psw = None): password to use for extracting. """ - f = self.open(fname, 'r', psw) - try: + with self.open(fname, 'r', psw) as f: return f.read() - finally: - f.close() def close(self): """Release open resources.""" @@ -641,7 +782,7 @@ def printdir(self): def extract(self, member, path=None, pwd=None): """Extract single file into current directory. - + Parameters: member @@ -659,7 +800,7 @@ def extract(self, member, path=None, pwd=None): def extractall(self, path=None, members=None, pwd=None): """Extract all files into current directory. - + Parameters: path @@ -684,77 +825,149 @@ def testrar(self): cmd = [UNRAR_TOOL] + list(TEST_ARGS) add_password_arg(cmd, self._password) cmd.append('--') - - if is_filelike(self.rarfile): - tmpname = membuf_tempfile(self.rarfile) - cmd.append(tmpname) - else: - tmpname = None - cmd.append(self.rarfile) - - try: + with XTempFile(self._rarfile) as rarfile: + cmd.append(rarfile) p = custom_popen(cmd) output = p.communicate()[0] check_returncode(p, output) - finally: - if tmpname: - os.unlink(tmpname) def strerror(self): - """Return error string if parsing failed, - or None if no problems. + """Return error string if parsing failed or None if no problems. 
""" - return self._parse_error + if not self._file_parser: + return "Not a RAR file" + return self._file_parser.strerror() ## ## private methods ## - def _set_error(self, msg, *args): - if args: - msg = msg % args - self._parse_error = msg - if self._strict: - raise BadRarFile(msg) + def _parse(self): + ver = _get_rar_version(self._rarfile) + if ver == 3: + p3 = RAR3Parser(self._rarfile, self._password, self._crc_check, + self._charset, self._strict, self._info_callback) + self._file_parser = p3 # noqa + elif ver == 5: + p5 = RAR5Parser(self._rarfile, self._password, self._crc_check, + self._charset, self._strict, self._info_callback) + self._file_parser = p5 # noqa + else: + raise BadRarFile("Not a RAR file") - # store entry - def _process_entry(self, item): - if item.type == RAR_BLOCK_FILE: - # use only first part - if (item.flags & RAR_FILE_SPLIT_BEFORE) == 0: - self._info_map[item.filename] = item - self._info_list.append(item) - # remember if any items require password - if item.needs_password(): - self._needs_password = True - elif len(self._info_list) > 0: - # final crc is in last block - old = self._info_list[-1] - old.CRC = item.CRC - old.compress_size += item.compress_size + self._file_parser.parse() + self.comment = self._file_parser.comment - # parse new-style comment - if item.type == RAR_BLOCK_SUB and item.filename == 'CMT': - if not NEED_COMMENTS: - pass - elif item.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): - pass - elif item.flags & RAR_FILE_SOLID: - # file comment - cmt = self._read_comment_v3(item, self._password) - if len(self._info_list) > 0: - old = self._info_list[-1] - old.comment = cmt - else: - # archive comment - cmt = self._read_comment_v3(item, self._password) - self.comment = cmt + # call unrar to extract a file + def _extract(self, fnlist, path=None, psw=None): + cmd = [UNRAR_TOOL] + list(EXTRACT_ARGS) - if self._info_callback: - self._info_callback(item) + # pasoword + psw = psw or self._password + add_password_arg(cmd, psw) + cmd.append('--') + + # rar file + with XTempFile(self._rarfile) as rarfn: + cmd.append(rarfn) + + # file list + for fn in fnlist: + if os.sep != PATH_SEP: + fn = fn.replace(PATH_SEP, os.sep) + cmd.append(fn) + + # destination path + if path is not None: + cmd.append(path + os.sep) + + # call + p = custom_popen(cmd) + output = p.communicate()[0] + check_returncode(p, output) + +# +# File format parsing +# + +class CommonParser(object): + """Shared parser parts.""" + _main = None + _hdrenc_main = None + _needs_password = False + _fd = None + _expect_sig = None + _parse_error = None + _password = None + comment = None + + def __init__(self, rarfile, password, crc_check, charset, strict, info_cb): + self._rarfile = rarfile + self._password = password + self._crc_check = crc_check + self._charset = charset + self._strict = strict + self._info_callback = info_cb + self._info_list = [] + self._info_map = {} + self._vol_list = [] + + def has_header_encryption(self): + """Returns True if headers are encrypted + """ + if self._hdrenc_main: + return True + if self._main: + if self._main.flags & RAR_MAIN_PASSWORD: + return True + return False + + def setpassword(self, psw): + """Set cached password.""" + self._password = psw + + def volumelist(self): + """Volume files""" + return self._vol_list + + def needs_password(self): + """Is password required""" + return self._needs_password + + def strerror(self): + """Last error""" + return self._parse_error + + def infolist(self): + """List of RarInfo records. 
+ """ + return self._info_list + + def getinfo(self, member): + """Return RarInfo for filename + """ + if isinstance(member, RarInfo): + fname = member.filename + else: + fname = member + + # accept both ways here + if PATH_SEP == '/': + fname2 = fname.replace("\\", "/") + else: + fname2 = fname.replace("/", "\\") + + try: + return self._info_map[fname] + except KeyError: + try: + return self._info_map[fname2] + except KeyError: + raise NoRarEntry("No such file: %s" % fname) # read rar - def _parse(self): + def parse(self): + """Process file.""" self._fd = None try: self._parse_real() @@ -764,19 +977,19 @@ def _parse(self): self._fd = None def _parse_real(self): - fd = XFile(self.rarfile) + fd = XFile(self._rarfile) self._fd = fd - id = fd.read(len(RAR_ID)) - if id != RAR_ID: - if isinstance(self.rarfile, (str, unicode)): - raise NotRarFile("Not a Rar archive: {}".format(self.rarfile)) + sig = fd.read(len(self._expect_sig)) + if sig != self._expect_sig: + if isinstance(self._rarfile, (str, unicode)): + raise NotRarFile("Not a Rar archive: {}".format(self._rarfile)) raise NotRarFile("Not a Rar archive") volume = 0 # first vol (.rar) is 0 - more_vols = 0 - endarc = 0 - volfile = self.rarfile - self._vol_list = [self.rarfile] + more_vols = False + endarc = False + volfile = self._rarfile + self._vol_list = [self._rarfile] while 1: if endarc: h = None # don't read past ENDARC @@ -793,8 +1006,12 @@ def _parse_real(self): self._set_error("Cannot open next volume: %s", volfile) break self._fd = fd - more_vols = 0 - endarc = 0 + sig = fd.read(len(self._expect_sig)) + if sig != self._expect_sig: + self._set_error("Invalid volume sig: %s", volfile) + break + more_vols = False + endarc = False self._vol_list.append(volfile) continue break @@ -811,44 +1028,49 @@ def _parse_real(self): if h.flags & RAR_MAIN_PASSWORD: self._needs_password = True if not self._password: - self._main = None break elif h.type == RAR_BLOCK_ENDARC: - more_vols = h.flags & RAR_ENDARC_NEXT_VOLUME - endarc = 1 + more_vols = (h.flags & RAR_ENDARC_NEXT_VOLUME) > 0 + endarc = True elif h.type == RAR_BLOCK_FILE: # RAR 2.x does not write RAR_BLOCK_ENDARC if h.flags & RAR_FILE_SPLIT_AFTER: - more_vols = 1 + more_vols = True # RAR 2.x does not set RAR_MAIN_FIRSTVOLUME if volume == 0 and h.flags & RAR_FILE_SPLIT_BEFORE: raise NeedFirstVolume("Need to start from first volume") + if h.needs_password(): + self._needs_password = True + # store it - self._process_entry(h) + self.process_entry(fd, h) + + if self._info_callback: + self._info_callback(h) # go to next header if h.add_size > 0: - fd.seek(h.file_offset + h.add_size, 0) + fd.seek(h.data_offset + h.add_size, 0) + + def process_entry(self, fd, item): + """Examine item, add into lookup cache.""" + raise NotImplementedError() - # AES encrypted headers - _last_aes_key = (None, None, None) # (salt, key, iv) def _decrypt_header(self, fd): - if not _have_crypto: - raise NoCrypto('Cannot parse encrypted headers - no crypto') - salt = fd.read(8) - if self._last_aes_key[0] == salt: - key, iv = self._last_aes_key[1:] - else: - key, iv = rar3_s2k(self._password, salt) - self._last_aes_key = (salt, key, iv) - return HeaderDecrypt(fd, key, iv) + raise NotImplementedError('_decrypt_header') + + def _parse_block_header(self, fd): + raise NotImplementedError('_parse_block_header') + + def _open_hack(self, inf, psw): + raise NotImplementedError('_open_hack') # read single header def _parse_header(self, fd): try: # handle encrypted headers - if self._main and self._main.flags & RAR_MAIN_PASSWORD: + if 
(self._main and self._main.flags & RAR_MAIN_PASSWORD) or self._hdrenc_main: if not self._password: return fd = self._decrypt_header(fd) @@ -859,11 +1081,168 @@ def _parse_header(self, fd): self._set_error('Broken header in RAR file') return None + # given current vol name, construct next one + def _next_volname(self, volfile): + if is_filelike(volfile): + raise IOError("Working on single FD") + if self._main.flags & RAR_MAIN_NEWNUMBERING: + return _next_newvol(volfile) + return _next_oldvol(volfile) + + def _set_error(self, msg, *args): + if args: + msg = msg % args + self._parse_error = msg + if self._strict: + raise BadRarFile(msg) + + def open(self, inf, psw): + """Return stream object for file data.""" + + if inf.file_redir: + # cannot leave to unrar as it expects copied file to exist + if inf.file_redir[0] in (RAR5_XREDIR_FILE_COPY, RAR5_XREDIR_HARD_LINK): + inf = self.getinfo(inf.file_redir[2]) + if not inf: + raise BadRarFile('cannot find copied file') + + if inf.flags & RAR_FILE_SPLIT_BEFORE: + raise NeedFirstVolume("Partial file, please start from first volume: " + inf.filename) + + # is temp write usable? + use_hack = 1 + if not self._main: + use_hack = 0 + elif self._main._must_disable_hack(): + use_hack = 0 + elif inf._must_disable_hack(): + use_hack = 0 + elif is_filelike(self._rarfile): + pass + elif inf.file_size > HACK_SIZE_LIMIT: + use_hack = 0 + elif not USE_EXTRACT_HACK: + use_hack = 0 + + # now extract + if inf.compress_type == RAR_M0 and (inf.flags & RAR_FILE_PASSWORD) == 0 and inf.file_redir is None: + return self._open_clear(inf) + elif use_hack: + return self._open_hack(inf, psw) + elif is_filelike(self._rarfile): + return self._open_unrar_membuf(self._rarfile, inf, psw) + else: + return self._open_unrar(self._rarfile, inf, psw) + + def _open_clear(self, inf): + return DirectReader(self, inf) + + def _open_hack_core(self, inf, psw, prefix, suffix): + + size = inf.compress_size + inf.header_size + rf = XFile(inf.volume_file, 0) + rf.seek(inf.header_offset) + + tmpfd, tmpname = mkstemp(suffix='.rar') + tmpf = os.fdopen(tmpfd, "wb") + + try: + tmpf.write(prefix) + while size > 0: + if size > BSIZE: + buf = rf.read(BSIZE) + else: + buf = rf.read(size) + if not buf: + raise BadRarFile('read failed: ' + inf.filename) + tmpf.write(buf) + size -= len(buf) + tmpf.write(suffix) + tmpf.close() + rf.close() + except: + rf.close() + tmpf.close() + os.unlink(tmpname) + raise + + return self._open_unrar(tmpname, inf, psw, tmpname) + + # write in-memory archive to temp file - needed for solid archives + def _open_unrar_membuf(self, memfile, inf, psw): + tmpname = membuf_tempfile(memfile) + return self._open_unrar(tmpname, inf, psw, tmpname, force_file=True) + + # extract using unrar + def _open_unrar(self, rarfile, inf, psw=None, tmpfile=None, force_file=False): + cmd = [UNRAR_TOOL] + list(OPEN_ARGS) + add_password_arg(cmd, psw) + cmd.append("--") + cmd.append(rarfile) + + # not giving filename avoids encoding related problems + if not tmpfile or force_file: + fn = inf.filename + if PATH_SEP != os.sep: + fn = fn.replace(PATH_SEP, os.sep) + cmd.append(fn) + + # read from unrar pipe + return PipeReader(self, inf, cmd, tmpfile) + +# +# RAR3 format +# + +class Rar3Info(RarInfo): + """RAR3 specific fields.""" + extract_version = 15 + salt = None + add_size = 0 + header_crc = None + header_size = None + header_offset = None + data_offset = None + _md_class = None + _md_expect = None + + # make sure some rar5 fields are always present + file_redir = None + blake2sp_hash = None + + def 
_must_disable_hack(self): + if self.type == RAR_BLOCK_FILE: + if self.flags & RAR_FILE_PASSWORD: + return True + elif self.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): + return True + elif self.type == RAR_BLOCK_MAIN: + if self.flags & (RAR_MAIN_SOLID | RAR_MAIN_PASSWORD): + return True + return False + + +class RAR3Parser(CommonParser): + """Parse RAR3 file format. + """ + _expect_sig = RAR_ID + _last_aes_key = (None, None, None) # (salt, key, iv) + + def _decrypt_header(self, fd): + if not _have_crypto: + raise NoCrypto('Cannot parse encrypted headers - no crypto') + salt = fd.read(8) + if self._last_aes_key[0] == salt: + key, iv = self._last_aes_key[1:] + else: + key, iv = rar3_s2k(self._password, salt) + self._last_aes_key = (salt, key, iv) + return HeaderDecrypt(fd, key, iv) + # common header def _parse_block_header(self, fd): - h = RarInfo() + h = Rar3Info() h.header_offset = fd.tell() - h.comment = None # read and parse base header buf = fd.read(S_BLK_HDR.size) @@ -871,24 +1250,24 @@ def _parse_block_header(self, fd): return None t = S_BLK_HDR.unpack_from(buf) h.header_crc, h.type, h.flags, h.header_size = t - h.header_base = S_BLK_HDR.size - pos = S_BLK_HDR.size # read full header if h.header_size > S_BLK_HDR.size: - h.header_data = buf + fd.read(h.header_size - S_BLK_HDR.size) + hdata = buf + fd.read(h.header_size - S_BLK_HDR.size) else: - h.header_data = buf - h.file_offset = fd.tell() + hdata = buf + h.data_offset = fd.tell() # unexpected EOF? - if len(h.header_data) != h.header_size: + if len(hdata) != h.header_size: self._set_error('Unexpected EOF when reading header') return None + pos = S_BLK_HDR.size + # block has data assiciated with it? if h.flags & RAR_LONG_BLOCK: - h.add_size = S_LONG.unpack_from(h.header_data, pos)[0] + h.add_size, pos = load_le32(hdata, pos) else: h.add_size = 0 @@ -896,31 +1275,36 @@ def _parse_block_header(self, fd): if h.type == RAR_BLOCK_MARK: return h elif h.type == RAR_BLOCK_MAIN: - h.header_base += 6 + pos += 6 if h.flags & RAR_MAIN_ENCRYPTVER: - h.header_base += 1 + pos += 1 + crc_pos = pos if h.flags & RAR_MAIN_COMMENT: - self._parse_subblocks(h, h.header_base) - self.comment = h.comment + self._parse_subblocks(h, hdata, pos) elif h.type == RAR_BLOCK_FILE: - self._parse_file_header(h, pos) + pos = self._parse_file_header(h, hdata, pos - 4) + crc_pos = pos + if h.flags & RAR_FILE_COMMENT: + pos = self._parse_subblocks(h, hdata, pos) elif h.type == RAR_BLOCK_SUB: - self._parse_file_header(h, pos) - h.header_base = h.header_size + pos = self._parse_file_header(h, hdata, pos - 4) + crc_pos = h.header_size elif h.type == RAR_BLOCK_OLD_AUTH: - h.header_base += 8 + pos += 8 + crc_pos = pos elif h.type == RAR_BLOCK_OLD_EXTRA: - h.header_base += 7 + pos += 7 + crc_pos = pos else: - h.header_base = h.header_size + crc_pos = h.header_size # check crc if h.type == RAR_BLOCK_OLD_SUB: - crcdat = h.header_data[2:] + fd.read(h.add_size) + crcdat = hdata[2:] + fd.read(h.add_size) else: - crcdat = h.header_data[2:h.header_base] + crcdat = hdata[2:crc_pos] - calc_crc = crc32(crcdat) & 0xFFFF + calc_crc = rar_crc32(crcdat) & 0xFFFF # return good header if h.header_crc == calc_crc: @@ -928,39 +1312,42 @@ def _parse_block_header(self, fd): # header parsing failed. 
self._set_error('Header CRC error (%02x): exp=%x got=%x (xlen = %d)', - h.type, h.header_crc, calc_crc, len(crcdat)) + h.type, h.header_crc, calc_crc, len(crcdat)) # instead panicing, send eof return None # read file-specific header - def _parse_file_header(self, h, pos): - fld = S_FILE_HDR.unpack_from(h.header_data, pos) + def _parse_file_header(self, h, hdata, pos): + fld = S_FILE_HDR.unpack_from(hdata, pos) + pos += S_FILE_HDR.size + h.compress_size = fld[0] h.file_size = fld[1] h.host_os = fld[2] h.CRC = fld[3] h.date_time = parse_dos_time(fld[4]) + h.mtime = to_datetime(h.date_time) h.extract_version = fld[5] h.compress_type = fld[6] - h.name_size = fld[7] + name_size = fld[7] h.mode = fld[8] - pos += S_FILE_HDR.size + + h._md_class = CRC32Context + h._md_expect = h.CRC if h.flags & RAR_FILE_LARGE: - h1 = S_LONG.unpack_from(h.header_data, pos)[0] - h2 = S_LONG.unpack_from(h.header_data, pos + 4)[0] + h1, pos = load_le32(hdata, pos) + h2, pos = load_le32(hdata, pos) h.compress_size |= h1 << 32 h.file_size |= h2 << 32 - pos += 8 h.add_size = h.compress_size - name = h.header_data[pos : pos + h.name_size ] - pos += h.name_size + name, pos = load_bytes(hdata, name_size, pos) if h.flags & RAR_FILE_UNICODE: nul = name.find(ZERO) h.orig_filename = name[:nul] - u = UnicodeFilename(h.orig_filename, name[nul + 1 : ]) + u = UnicodeFilename(h.orig_filename, name[nul + 1:]) h.filename = u.decode() # if parsing failed fall back to simple name @@ -975,278 +1362,534 @@ def _parse_file_header(self, h, pos): h.filename = h.filename.replace('\\', PATH_SEP) if h.flags & RAR_FILE_SALT: - h.salt = h.header_data[pos : pos + 8] - pos += 8 + h.salt, pos = load_bytes(hdata, 8, pos) else: h.salt = None # optional extended time stamps if h.flags & RAR_FILE_EXTTIME: - pos = self._parse_ext_time(h, pos) + pos = _parse_ext_time(h, hdata, pos) else: h.mtime = h.atime = h.ctime = h.arctime = None - # base header end - h.header_base = pos - - if h.flags & RAR_FILE_COMMENT: - self._parse_subblocks(h, pos) - - # convert timestamps - if USE_DATETIME: - h.date_time = to_datetime(h.date_time) - h.mtime = to_datetime(h.mtime) - h.atime = to_datetime(h.atime) - h.ctime = to_datetime(h.ctime) - h.arctime = to_datetime(h.arctime) - - # .mtime is .date_time with more precision - if h.mtime: - if USE_DATETIME: - h.date_time = h.mtime - else: - # keep seconds int - h.date_time = h.mtime[:5] + (int(h.mtime[5]),) - return pos # find old-style comment subblock - def _parse_subblocks(self, h, pos): - hdata = h.header_data + def _parse_subblocks(self, h, hdata, pos): while pos < len(hdata): # ordinary block header t = S_BLK_HDR.unpack_from(hdata, pos) - scrc, stype, sflags, slen = t + ___scrc, stype, sflags, slen = t pos_next = pos + slen pos += S_BLK_HDR.size - # corrupt header - if pos_next < pos: - break + # corrupt header + if pos_next < pos: + break + + # followed by block-specific header + if stype == RAR_BLOCK_OLD_COMMENT and pos + S_COMMENT_HDR.size <= pos_next: + declen, ver, meth, crc = S_COMMENT_HDR.unpack_from(hdata, pos) + pos += S_COMMENT_HDR.size + data = hdata[pos : pos_next] + cmt = rar3_decompress(ver, meth, data, declen, sflags, + crc, self._password) + if not self._crc_check: + h.comment = self._decode_comment(cmt) + elif rar_crc32(cmt) & 0xFFFF == crc: + h.comment = self._decode_comment(cmt) + + pos = pos_next + return pos + + def _read_comment_v3(self, inf, psw=None): + + # read data + with XFile(inf.volume_file) as rf: + rf.seek(inf.data_offset) + data = rf.read(inf.compress_size) + + # decompress + cmt = 
rar3_decompress(inf.extract_version, inf.compress_type, data, + inf.file_size, inf.flags, inf.CRC, psw, inf.salt) + + # check crc + if self._crc_check: + crc = rar_crc32(cmt) + if crc != inf.CRC: + return None + + return self._decode_comment(cmt) + + def _decode(self, val): + for c in TRY_ENCODINGS: + try: + return val.decode(c) + except UnicodeError: + pass + return val.decode(self._charset, 'replace') + + def _decode_comment(self, val): + return self._decode(val) + + def process_entry(self, fd, item): + if item.type == RAR_BLOCK_FILE: + # use only first part + if (item.flags & RAR_FILE_SPLIT_BEFORE) == 0: + self._info_map[item.filename] = item + self._info_list.append(item) + elif len(self._info_list) > 0: + # final crc is in last block + old = self._info_list[-1] + old.CRC = item.CRC + old._md_expect = item._md_expect + old.compress_size += item.compress_size + + # parse new-style comment + if item.type == RAR_BLOCK_SUB and item.filename == 'CMT': + if item.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): + pass + elif item.flags & RAR_FILE_SOLID: + # file comment + cmt = self._read_comment_v3(item, self._password) + if len(self._info_list) > 0: + old = self._info_list[-1] + old.comment = cmt + else: + # archive comment + cmt = self._read_comment_v3(item, self._password) + self.comment = cmt - # followed by block-specific header - if stype == RAR_BLOCK_OLD_COMMENT and pos + S_COMMENT_HDR.size <= pos_next: - declen, ver, meth, crc = S_COMMENT_HDR.unpack_from(hdata, pos) - pos += S_COMMENT_HDR.size - data = hdata[pos : pos_next] - cmt = rar_decompress(ver, meth, data, declen, sflags, - crc, self._password) - if not self._crc_check: - h.comment = self._decode_comment(cmt) - elif crc32(cmt) & 0xFFFF == crc: - h.comment = self._decode_comment(cmt) + if item.type == RAR_BLOCK_MAIN: + if item.flags & RAR_MAIN_COMMENT: + self.comment = item.comment + if item.flags & RAR_MAIN_PASSWORD: + self._needs_password = True - pos = pos_next + # put file compressed data into temporary .rar archive, and run + # unrar on that, thus avoiding unrar going over whole archive + def _open_hack(self, inf, psw): + # create main header: crc, type, flags, size, res1, res2 + prefix = RAR_ID + S_BLK_HDR.pack(0x90CF, 0x73, 0, 13) + ZERO * (2 + 4) + return self._open_hack_core(inf, psw, prefix, EMPTY) - def _parse_ext_time(self, h, pos): - data = h.header_data +# +# RAR5 format +# - # flags and rest of data can be missing - flags = 0 - if pos + 2 <= len(data): - flags = S_SHORT.unpack_from(data, pos)[0] - pos += 2 +class Rar5Info(RarInfo): + """Shared fields for RAR5 records. 
+ """ + extract_version = 50 + header_crc = None + header_size = None + header_offset = None + data_offset = None + + # type=all + block_type = None + block_flags = None + add_size = 0 + block_extra_size = 0 + + # type=MAIN + volume_number = None + _md_class = None + _md_expect = None + + def _must_disable_hack(self): + return False - h.mtime, pos = self._parse_xtime(flags >> 3*4, data, pos, h.date_time) - h.ctime, pos = self._parse_xtime(flags >> 2*4, data, pos) - h.atime, pos = self._parse_xtime(flags >> 1*4, data, pos) - h.arctime, pos = self._parse_xtime(flags >> 0*4, data, pos) - return pos - def _parse_xtime(self, flag, data, pos, dostime = None): - unit = 10000000.0 # 100 ns units - if flag & 8: - if not dostime: - t = S_LONG.unpack_from(data, pos)[0] - dostime = parse_dos_time(t) - pos += 4 - rem = 0 - cnt = flag & 3 - for i in range(cnt): - b = S_BYTE.unpack_from(data, pos)[0] - rem = (b << 16) | (rem >> 8) - pos += 1 - sec = dostime[5] + rem / unit - if flag & 4: - sec += 1 - dostime = dostime[:5] + (sec,) - return dostime, pos +class Rar5BaseFile(Rar5Info): + """Shared sturct for file & service record. + """ + type = -1 + file_flags = None + file_encryption = (0, 0, 0, EMPTY, EMPTY, EMPTY) + file_compress_flags = None + file_redir = None + file_owner = None + file_version = None + blake2sp_hash = None + + def _must_disable_hack(self): + if self.flags & RAR_FILE_PASSWORD: + return True + if self.block_flags & (RAR5_BLOCK_FLAG_SPLIT_BEFORE | RAR5_BLOCK_FLAG_SPLIT_AFTER): + return True + if self.file_compress_flags & RAR5_COMPR_SOLID: + return True + if self.file_redir: + return True + return False - # given current vol name, construct next one - def _next_volname(self, volfile): - if is_filelike(volfile): - raise IOError("Working on single FD") - if self._main.flags & RAR_MAIN_NEWNUMBERING: - return self._next_newvol(volfile) - return self._next_oldvol(volfile) - - # new-style next volume - def _next_newvol(self, volfile): - i = len(volfile) - 1 - while i >= 0: - if volfile[i] >= '0' and volfile[i] <= '9': - return self._inc_volname(volfile, i) - i -= 1 - raise BadRarName("Cannot construct volume name: "+volfile) - - # old-style next volume - def _next_oldvol(self, volfile): - # rar -> r00 - if volfile[-4:].lower() == '.rar': - return volfile[:-2] + '00' - return self._inc_volname(volfile, len(volfile) - 1) - - # increase digits with carry, otherwise just increment char - def _inc_volname(self, volfile, i): - fn = list(volfile) - while i >= 0: - if fn[i] != '9': - fn[i] = chr(ord(fn[i]) + 1) - break - fn[i] = '0' - i -= 1 - return ''.join(fn) - def _open_clear(self, inf): - return DirectReader(self, inf) +class Rar5FileInfo(Rar5BaseFile): + """RAR5 file record. + """ + type = RAR_BLOCK_FILE - # put file compressed data into temporary .rar archive, and run - # unrar on that, thus avoiding unrar going over whole archive - def _open_hack(self, inf, psw = None): - BSIZE = 32*1024 - size = inf.compress_size + inf.header_size - rf = XFile(inf.volume_file, 0) - rf.seek(inf.header_offset) +class Rar5ServiceInfo(Rar5BaseFile): + """RAR5 service record. 
+ """ + type = RAR_BLOCK_SUB - tmpfd, tmpname = mkstemp(suffix='.rar') - tmpf = os.fdopen(tmpfd, "wb") - try: - # create main header: crc, type, flags, size, res1, res2 - mh = S_BLK_HDR.pack(0x90CF, 0x73, 0, 13) + ZERO * (2+4) - tmpf.write(RAR_ID + mh) - while size > 0: - if size > BSIZE: - buf = rf.read(BSIZE) - else: - buf = rf.read(size) - if not buf: - raise BadRarFile('read failed: ' + inf.filename) - tmpf.write(buf) - size -= len(buf) - tmpf.close() - rf.close() - except: - rf.close() - tmpf.close() - os.unlink(tmpname) - raise +class Rar5MainInfo(Rar5Info): + """RAR5 archive main record. + """ + type = RAR_BLOCK_MAIN + main_flags = None + main_volume_number = None - return self._open_unrar(tmpname, inf, psw, tmpname) + def _must_disable_hack(self): + if self.main_flags & RAR5_MAIN_FLAG_SOLID: + return True + return False - def _read_comment_v3(self, inf, psw=None): - # read data - rf = XFile(inf.volume_file) - rf.seek(inf.file_offset) - data = rf.read(inf.compress_size) - rf.close() +class Rar5EncryptionInfo(Rar5Info): + """RAR5 archive header encryption record. + """ + type = RAR5_BLOCK_ENCRYPTION + encryption_algo = None + encryption_flags = None + encryption_kdf_count = None + encryption_salt = None + encryption_check_value = None - # decompress - cmt = rar_decompress(inf.extract_version, inf.compress_type, data, - inf.file_size, inf.flags, inf.CRC, psw, inf.salt) + def needs_password(self): + return True - # check crc - if self._crc_check: - crc = crc32(cmt) - if crc < 0: - crc += (1 << 32) - if crc != inf.CRC: - return None - return self._decode_comment(cmt) +class Rar5EndArcInfo(Rar5Info): + """RAR5 end of archive record. + """ + type = RAR_BLOCK_ENDARC + endarc_flags = None - # write in-memory archive to temp file - needed for solid archives - def _open_unrar_membuf(self, memfile, inf, psw): - tmpname = membuf_tempfile(memfile) - return self._open_unrar(tmpname, inf, psw, tmpname) - # extract using unrar - def _open_unrar(self, rarfile, inf, psw = None, tmpfile = None): - if is_filelike(rarfile): - raise ValueError("Cannot use unrar directly on memory buffer") - cmd = [UNRAR_TOOL] + list(OPEN_ARGS) - add_password_arg(cmd, psw) - cmd.append("--") - cmd.append(rarfile) +class RAR5Parser(CommonParser): + """Parse RAR5 format. 
+ """ + _expect_sig = RAR5_ID + _hdrenc_main = None - # not giving filename avoids encoding related problems - if not tmpfile: - fn = inf.filename - if PATH_SEP != os.sep: - fn = fn.replace(PATH_SEP, os.sep) - cmd.append(fn) + # AES encrypted headers + _last_aes256_key = (-1, None, None) # (kdf_count, salt, key) + + def _gen_key(self, kdf_count, salt): + if self._last_aes256_key[:2] == (kdf_count, salt): + return self._last_aes256_key[2] + if kdf_count > 24: + raise BadRarFile('Too large kdf_count') + psw = self._password + if isinstance(psw, unicode): + psw = psw.encode('utf8') + key = pbkdf2_sha256(psw, salt, 1 << kdf_count) + self._last_aes256_key = (kdf_count, salt, key) + return key - # read from unrar pipe - return PipeReader(self, inf, cmd, tmpfile) + def _decrypt_header(self, fd): + if not _have_crypto: + raise NoCrypto('Cannot parse encrypted headers - no crypto') + h = self._hdrenc_main + key = self._gen_key(h.encryption_kdf_count, h.encryption_salt) + iv = fd.read(16) + return HeaderDecrypt(fd, key, iv) - def _decode(self, val): - for c in TRY_ENCODINGS: - try: - return val.decode(c) - except UnicodeError: - pass - return val.decode(self._charset, 'replace') + # common header + def _parse_block_header(self, fd): + header_offset = fd.tell() - def _decode_comment(self, val): - if UNICODE_COMMENTS: - return self._decode(val) - return val + preload = 4 + 3 + start_bytes = fd.read(preload) + header_crc, pos = load_le32(start_bytes, 0) + hdrlen, pos = load_vint(start_bytes, pos) + if hdrlen > 2 * 1024 * 1024: + return None + header_size = pos + hdrlen - # call unrar to extract a file - def _extract(self, fnlist, path=None, psw=None): - cmd = [UNRAR_TOOL] + list(EXTRACT_ARGS) + # read full header, check for EOF + hdata = start_bytes + fd.read(header_size - len(start_bytes)) + if len(hdata) != header_size: + self._set_error('Unexpected EOF when reading header') + return None + data_offset = fd.tell() - # pasoword - psw = psw or self._password - add_password_arg(cmd, psw) - cmd.append('--') + calc_crc = rar_crc32(memoryview(hdata)[4:]) + if header_crc != calc_crc: + # header parsing failed. 
+ self._set_error('Header CRC error: exp=%x got=%x (xlen = %d)', + header_crc, calc_crc, len(hdata)) + return None - # rar file - if is_filelike(self.rarfile): - tmpname = membuf_tempfile(self.rarfile) - cmd.append(tmpname) + block_type, pos = load_vint(hdata, pos) + + if block_type == RAR5_BLOCK_MAIN: + h, pos = self._parse_block_common(Rar5MainInfo(), hdata) + h = self._parse_main_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_FILE: + h, pos = self._parse_block_common(Rar5FileInfo(), hdata) + h = self._parse_file_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_SERVICE: + h, pos = self._parse_block_common(Rar5ServiceInfo(), hdata) + h = self._parse_file_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_ENCRYPTION: + h, pos = self._parse_block_common(Rar5EncryptionInfo(), hdata) + h = self._parse_encryption_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_ENDARC: + h, pos = self._parse_block_common(Rar5EndArcInfo(), hdata) + h = self._parse_endarc_block(h, hdata, pos) + else: + h = None + if h: + h.header_offset = header_offset + h.data_offset = data_offset + return h + + def _parse_block_common(self, h, hdata): + h.header_crc, pos = load_le32(hdata, 0) + hdrlen, pos = load_vint(hdata, pos) + h.header_size = hdrlen + pos + h.block_type, pos = load_vint(hdata, pos) + h.block_flags, pos = load_vint(hdata, pos) + + if h.block_flags & RAR5_BLOCK_FLAG_EXTRA_DATA: + h.block_extra_size, pos = load_vint(hdata, pos) + if h.block_flags & RAR5_BLOCK_FLAG_DATA_AREA: + h.add_size, pos = load_vint(hdata, pos) + + h.compress_size = h.add_size + + if h.block_flags & RAR5_BLOCK_FLAG_SKIP_IF_UNKNOWN: + h.flags |= RAR_SKIP_IF_UNKNOWN + if h.block_flags & RAR5_BLOCK_FLAG_DATA_AREA: + h.flags |= RAR_LONG_BLOCK + return h, pos + + def _parse_main_block(self, h, hdata, pos): + h.main_flags, pos = load_vint(hdata, pos) + if h.main_flags & RAR5_MAIN_FLAG_HAS_VOLNR: + h.main_volume_number = load_vint(hdata, pos) + + h.flags |= RAR_MAIN_NEWNUMBERING + if h.main_flags & RAR5_MAIN_FLAG_SOLID: + h.flags |= RAR_MAIN_SOLID + if h.main_flags & RAR5_MAIN_FLAG_ISVOL: + h.flags |= RAR_MAIN_VOLUME + if h.main_flags & RAR5_MAIN_FLAG_RECOVERY: + h.flags |= RAR_MAIN_RECOVERY + if self._hdrenc_main: + h.flags |= RAR_MAIN_PASSWORD + if h.main_flags & RAR5_MAIN_FLAG_HAS_VOLNR == 0: + h.flags |= RAR_MAIN_FIRSTVOLUME + + return h + + def _parse_file_block(self, h, hdata, pos): + h.file_flags, pos = load_vint(hdata, pos) + h.file_size, pos = load_vint(hdata, pos) + h.mode, pos = load_vint(hdata, pos) + + if h.file_flags & RAR5_FILE_FLAG_HAS_MTIME: + h.mtime, pos = load_unixtime(hdata, pos) + h.date_time = h.mtime.timetuple()[:6] + if h.file_flags & RAR5_FILE_FLAG_HAS_CRC32: + h.CRC, pos = load_le32(hdata, pos) + h._md_class = CRC32Context + h._md_expect = h.CRC + + h.file_compress_flags, pos = load_vint(hdata, pos) + h.file_host_os, pos = load_vint(hdata, pos) + h.orig_filename, pos = load_vstr(hdata, pos) + h.filename = h.orig_filename.decode('utf8', 'replace') + + # use compatible values + if h.file_host_os == RAR5_OS_WINDOWS: + h.host_os = RAR_OS_WIN32 + else: + h.host_os = RAR_OS_UNIX + h.compress_type = RAR_M0 + ((h.file_compress_flags >> 7) & 7) + + if h.block_extra_size: + # allow 1 byte of garbage + while pos < len(hdata) - 1: + xsize, pos = load_vint(hdata, pos) + xdata, pos = load_bytes(hdata, xsize, pos) + self._process_file_extra(h, xdata) + + if h.block_flags & RAR5_BLOCK_FLAG_SPLIT_BEFORE: + h.flags |= RAR_FILE_SPLIT_BEFORE + if h.block_flags & RAR5_BLOCK_FLAG_SPLIT_AFTER: + h.flags |= 
RAR_FILE_SPLIT_AFTER + if h.file_flags & RAR5_FILE_FLAG_ISDIR: + h.flags |= RAR_FILE_DIRECTORY + if h.file_compress_flags & RAR5_COMPR_SOLID: + h.flags |= RAR_FILE_SOLID + + return h + + def _parse_endarc_block(self, h, hdata, pos): + h.endarc_flags, pos = load_vint(hdata, pos) + if h.endarc_flags & RAR5_ENDARC_FLAG_NEXT_VOL: + h.flags |= RAR_ENDARC_NEXT_VOLUME + return h + + def _parse_encryption_block(self, h, hdata, pos): + h.encryption_algo, pos = load_vint(hdata, pos) + h.encryption_flags, pos = load_vint(hdata, pos) + h.encryption_kdf_count, pos = load_byte(hdata, pos) + h.encryption_salt, pos = load_bytes(hdata, 16, pos) + if h.encryption_flags & RAR5_ENC_FLAG_HAS_CHECKVAL: + h.encryption_check_value = load_bytes(hdata, 12, pos) + if h.encryption_algo != RAR5_XENC_CIPHER_AES256: + raise BadRarFile('Unsupported header encryption cipher') + self._hdrenc_main = h + return h + + # file extra record + def _process_file_extra(self, h, xdata): + xtype, pos = load_vint(xdata, 0) + if xtype == RAR5_XFILE_TIME: + self._parse_file_xtime(h, xdata, pos) + elif xtype == RAR5_XFILE_ENCRYPTION: + self._parse_file_encryption(h, xdata, pos) + elif xtype == RAR5_XFILE_HASH: + self._parse_file_hash(h, xdata, pos) + elif xtype == RAR5_XFILE_VERSION: + self._parse_file_version(h, xdata, pos) + elif xtype == RAR5_XFILE_REDIR: + self._parse_file_redir(h, xdata, pos) + elif xtype == RAR5_XFILE_OWNER: + self._parse_file_owner(h, xdata, pos) + elif xtype == RAR5_XFILE_SERVICE: + pass else: - tmpname = None - cmd.append(self.rarfile) + pass - # file list - for fn in fnlist: - if os.sep != PATH_SEP: - fn = fn.replace(PATH_SEP, os.sep) - cmd.append(fn) + # extra block for file time record + def _parse_file_xtime(self, h, xdata, pos): + tflags, pos = load_vint(xdata, pos) + ldr = load_windowstime + if tflags & RAR5_XTIME_UNIXTIME: + ldr = load_unixtime + if tflags & RAR5_XTIME_HAS_MTIME: + h.mtime, pos = ldr(xdata, pos) + h.date_time = h.mtime.timetuple()[:6] + if tflags & RAR5_XTIME_HAS_CTIME: + h.ctime, pos = ldr(xdata, pos) + if tflags & RAR5_XTIME_HAS_ATIME: + h.atime, pos = ldr(xdata, pos) + + # just remember encryption info + def _parse_file_encryption(self, h, xdata, pos): + algo, pos = load_vint(xdata, pos) + flags, pos = load_vint(xdata, pos) + kdf_count, pos = load_byte(xdata, pos) + salt, pos = load_bytes(xdata, 16, pos) + iv, pos = load_bytes(xdata, 16, pos) + checkval = None + if flags & RAR5_XENC_CHECKVAL: + checkval, pos = load_bytes(xdata, 12, pos) + if flags & RAR5_XENC_TWEAKED: + h._md_expect = None + h._md_class = NoHashContext + + h.file_encryption = (algo, flags, kdf_count, salt, iv, checkval) + h.flags |= RAR_FILE_PASSWORD + + def _parse_file_hash(self, h, xdata, pos): + hash_type, pos = load_vint(xdata, pos) + if hash_type == RAR5_XHASH_BLAKE2SP: + h.blake2sp_hash, pos = load_bytes(xdata, 32, pos) + if _have_blake2 and (h.file_encryption[1] & RAR5_XENC_TWEAKED) == 0: + h._md_class = Blake2SP + h._md_expect = h.blake2sp_hash + + def _parse_file_version(self, h, xdata, pos): + flags, pos = load_vint(xdata, pos) + version, pos = load_vint(xdata, pos) + h.file_version = (flags, version) + + def _parse_file_redir(self, h, xdata, pos): + redir_type, pos = load_vint(xdata, pos) + redir_flags, pos = load_vint(xdata, pos) + redir_name, pos = load_vstr(xdata, pos) + redir_name = redir_name.decode('utf8', 'replace') + h.file_redir = (redir_type, redir_flags, redir_name) + + def _parse_file_owner(self, h, xdata, pos): + user_name = group_name = user_id = group_id = None + + flags, pos = 
load_vint(xdata, pos) + if flags & RAR5_XOWNER_UNAME: + user_name, pos = load_vstr(xdata, pos) + if flags & RAR5_XOWNER_GNAME: + group_name, pos = load_vstr(xdata, pos) + if flags & RAR5_XOWNER_UID: + user_id, pos = load_vint(xdata, pos) + if flags & RAR5_XOWNER_GID: + group_id, pos = load_vint(xdata, pos) + + h.file_owner = (user_name, group_name, user_id, group_id) + + def process_entry(self, fd, item): + if item.block_type == RAR5_BLOCK_FILE: + # use only first part + if (item.block_flags & RAR5_BLOCK_FLAG_SPLIT_BEFORE) == 0: + self._info_map[item.filename] = item + self._info_list.append(item) + elif len(self._info_list) > 0: + # final crc is in last block + old = self._info_list[-1] + old.CRC = item.CRC + old._md_expect = item._md_expect + old.blake2sp_hash = item.blake2sp_hash + old.compress_size += item.compress_size + elif item.block_type == RAR5_BLOCK_SERVICE: + if item.filename == 'CMT': + self._load_comment(fd, item) - # destination path - if path is not None: - cmd.append(path + os.sep) + def _load_comment(self, fd, item): + if item.block_flags & (RAR5_BLOCK_FLAG_SPLIT_BEFORE | RAR5_BLOCK_FLAG_SPLIT_AFTER): + return None + if item.compress_type != RAR_M0: + return None - # call - try: - p = custom_popen(cmd) - output = p.communicate()[0] - check_returncode(p, output) - finally: - if tmpname: - os.unlink(tmpname) + if item.flags & RAR_FILE_PASSWORD: + algo, ___flags, kdf_count, salt, iv, ___checkval = item.file_encryption + if algo != RAR5_XENC_CIPHER_AES256: + return None + key = self._gen_key(kdf_count, salt) + f = HeaderDecrypt(fd, key, iv) + cmt = f.read(item.file_size) + else: + # archive comment + with self._open_clear(item) as cmtstream: + cmt = cmtstream.read() + + # rar bug? - appends zero to comment + cmt = cmt.split(ZERO, 1)[0] + self.comment = cmt.decode('utf8') + + def _open_hack(self, inf, psw): + # len, type, blk_flags, flags + main_hdr = b'\x03\x01\x00\x00' + endarc_hdr = b'\x03\x05\x00\x00' + main_hdr = S_LONG.pack(rar_crc32(main_hdr)) + main_hdr + endarc_hdr = S_LONG.pack(rar_crc32(endarc_hdr)) + endarc_hdr + return self._open_hack_core(inf, psw, RAR5_ID + main_hdr, endarc_hdr) ## ## Utility classes ## class UnicodeFilename(object): - """Handle unicode filename decompression""" - + """Handle RAR3 unicode filename decompression. + """ def __init__(self, name, encdata): self.std_name = bytearray(name) self.encdata = bytearray(encdata) @@ -1255,6 +1898,7 @@ def __init__(self, name, encdata): self.failed = 0 def enc_byte(self): + """Copy encoded byte.""" try: c = self.encdata[self.encpos] self.encpos += 1 @@ -1264,6 +1908,7 @@ def enc_byte(self): return 0 def std_byte(self): + """Copy byte from 8-bit representation.""" try: return self.std_name[self.pos] except IndexError: @@ -1271,11 +1916,13 @@ def std_byte(self): return ord('?') def put(self, lo, hi): + """Copy 16-bit value to result.""" self.buf.append(lo) self.buf.append(hi) self.pos += 1 def decode(self): + """Decompress compressed UTF16 value.""" hi = self.enc_byte() flagbits = 0 while self.encpos < len(self.encdata): @@ -1294,11 +1941,11 @@ def decode(self): n = self.enc_byte() if n & 0x80: c = self.enc_byte() - for i in range((n & 0x7f) + 2): + for _ in range((n & 0x7f) + 2): lo = (self.std_byte() + c) & 0xFF self.put(lo, hi) else: - for i in range(n + 2): + for _ in range(n + 2): self.put(self.std_byte(), 0) return self.buf.decode("utf-16le", "replace") @@ -1311,77 +1958,78 @@ class RarExtFile(RawIOBase): Behaviour: - no short reads - .read() and .readinfo() read as much as requested. 
- no internal buffer, use io.BufferedReader for that. - - If :mod:`io` module is available (Python 2.6+, 3.x), then this calls - will inherit from :class:`io.RawIOBase` class. This makes line-based - access available: :meth:`RarExtFile.readline` and ``for ln in f``. """ #: Filename of the archive entry name = None - def __init__(self, rf, inf): + def __init__(self, parser, inf): + """Open archive entry. + """ super(RarExtFile, self).__init__() # standard io.* properties self.name = inf.filename self.mode = 'rb' - self.rf = rf - self.inf = inf - self.crc_check = rf._crc_check - self.fd = None - self.CRC = 0 - self.remain = 0 - self.returncode = 0 + self._parser = parser + self._inf = inf + self._fd = None + self._remain = 0 + self._returncode = 0 + + self._md_context = None self._open() def _open(self): - if self.fd: - self.fd.close() - self.fd = None - self.CRC = 0 - self.remain = self.inf.file_size + if self._fd: + self._fd.close() + md_class = self._inf._md_class or NoHashContext + self._md_context = md_class() + self._fd = None + self._remain = self._inf.file_size - def read(self, cnt = None): + def read(self, cnt=None): """Read all or specified amount of data from archive entry.""" # sanitize cnt if cnt is None or cnt < 0: - cnt = self.remain - elif cnt > self.remain: - cnt = self.remain + cnt = self._remain + elif cnt > self._remain: + cnt = self._remain if cnt == 0: return EMPTY # actual read data = self._read(cnt) if data: - self.CRC = crc32(data, self.CRC) - self.remain -= len(data) + self._md_context.update(data) + self._remain -= len(data) if len(data) != cnt: raise BadRarFile("Failed the read enough data") # done? - if not data or self.remain == 0: - #self.close() + if not data or self._remain == 0: + # self.close() self._check() return data def _check(self): """Check final CRC.""" - if not self.crc_check: + final = self._md_context.digest() + exp = self._inf._md_expect + if exp is None: return - if self.returncode: + if final is None: + return + if self._returncode: check_returncode(self, '') - if self.remain != 0: + if self._remain != 0: raise BadRarFile("Failed the read enough data") - crc = self.CRC - if crc < 0: - crc += (1 << 32) - if crc != self.inf.CRC: - raise BadRarFile("Corrupt file - CRC check failed: " + self.inf.filename) + if final != exp: + raise BadRarFile("Corrupt file - CRC check failed: %s - exp=%r got=%r" % ( + self._inf.filename, exp, final)) def _read(self, cnt): """Actual read that gets sanitized cnt.""" @@ -1391,9 +2039,9 @@ def close(self): super(RarExtFile, self).close() - if self.fd: - self.fd.close() - self.fd = None + if self._fd: + self._fd.close() + self._fd = None def __del__(self): """Hook delete to make sure tempfile is removed.""" @@ -1404,25 +2052,15 @@ def readinto(self, buf): Returns bytes read. """ - - data = self.read(len(buf)) - n = len(data) - try: - buf[:n] = data - except TypeError: - import array - if not isinstance(buf, array.array): - raise - buf[:n] = array.array(buf.typecode, data) - return n + raise NotImplementedError('readinto') def tell(self): """Return current reading position in uncompressed data.""" - return self.inf.file_size - self.remain + return self._inf.file_size - self._remain - def seek(self, ofs, whence = 0): + def seek(self, ofs, whence=0): """Seek in data. - + On uncompressed files, the seeking works by actual seeks so it's fast. 
On compresses files its slow - forward seeking happends by reading ahead, @@ -1430,9 +2068,9 @@ def seek(self, ofs, whence = 0): """ # disable crc check when seeking - self.crc_check = 0 + self._md_context = NoHashContext() - fsize = self.inf.file_size + fsize = self._inf.file_size cur_ofs = self.tell() if whence == 0: # seek from beginning of file @@ -1454,8 +2092,6 @@ def seek(self, ofs, whence = 0): if new_ofs >= cur_ofs: self._skip(new_ofs - cur_ofs) else: - # process old data ? - #self._skip(fsize - cur_ofs) # reopen and seek self._open() self._skip(new_ofs) @@ -1478,13 +2114,14 @@ def readable(self): def writable(self): """Returns False. - - Writing is not supported.""" + + Writing is not supported. + """ return False def seekable(self): """Returns True. - + Seeking is supported, although it's slow on compressed files. """ return True @@ -1499,23 +2136,23 @@ class PipeReader(RarExtFile): """Read data from pipe, handle tempfile cleanup.""" def __init__(self, rf, inf, cmd, tempfile=None): - self.cmd = cmd - self.proc = None - self.tempfile = tempfile + self._cmd = cmd + self._proc = None + self._tempfile = tempfile super(PipeReader, self).__init__(rf, inf) def _close_proc(self): - if not self.proc: + if not self._proc: return - if self.proc.stdout: - self.proc.stdout.close() - if self.proc.stdin: - self.proc.stdin.close() - if self.proc.stderr: - self.proc.stderr.close() - self.proc.wait() - self.returncode = self.proc.returncode - self.proc = None + if self._proc.stdout: + self._proc.stdout.close() + if self._proc.stdin: + self._proc.stdin.close() + if self._proc.stderr: + self._proc.stderr.close() + self._proc.wait() + self._returncode = self._proc.returncode + self._proc = None def _open(self): super(PipeReader, self)._open() @@ -1524,19 +2161,19 @@ def _open(self): self._close_proc() # launch new process - self.returncode = 0 - self.proc = custom_popen(self.cmd) - self.fd = self.proc.stdout + self._returncode = 0 + self._proc = custom_popen(self._cmd) + self._fd = self._proc.stdout # avoid situation where unrar waits on stdin - if self.proc.stdin: - self.proc.stdin.close() + if self._proc.stdin: + self._proc.stdin.close() def _read(self, cnt): """Read from pipe.""" # normal read is usually enough - data = self.fd.read(cnt) + data = self._fd.read(cnt) if len(data) == cnt or not data: return data @@ -1544,7 +2181,7 @@ def _read(self, cnt): buf = [data] cnt -= len(data) while cnt > 0: - data = self.fd.read(cnt) + data = self._fd.read(cnt) if not data: break cnt -= len(data) @@ -1557,42 +2194,45 @@ def close(self): self._close_proc() super(PipeReader, self).close() - if self.tempfile: + if self._tempfile: try: - os.unlink(self.tempfile) + os.unlink(self._tempfile) except OSError: pass - self.tempfile = None + self._tempfile = None def readinto(self, buf): """Zero-copy read directly into buffer.""" cnt = len(buf) - if cnt > self.remain: - cnt = self.remain + if cnt > self._remain: + cnt = self._remain vbuf = memoryview(buf) res = got = 0 while got < cnt: - res = self.fd.readinto(vbuf[got : cnt]) + res = self._fd.readinto(vbuf[got : cnt]) if not res: break - if self.crc_check: - self.CRC = crc32(vbuf[got : got + res], self.CRC) - self.remain -= res + self._md_context.update(vbuf[got : got + res]) + self._remain -= res got += res return got class DirectReader(RarExtFile): - """Read uncompressed data directly from archive.""" + """Read uncompressed data directly from archive. 
+ """ + _cur = None + _cur_avail = None + _volfile = None def _open(self): super(DirectReader, self)._open() - self.volfile = self.inf.volume_file - self.fd = XFile(self.volfile, 0) - self.fd.seek(self.inf.header_offset, 0) - self.cur = self.rf._parse_header(self.fd) - self.cur_avail = self.cur.add_size + self._volfile = self._inf.volume_file + self._fd = XFile(self._volfile, 0) + self._fd.seek(self._inf.header_offset, 0) + self._cur = self._parser._parse_header(self._fd) + self._cur_avail = self._cur.add_size def _skip(self, cnt): """RAR Seek, skipping through rar files to get to correct position @@ -1600,19 +2240,19 @@ def _skip(self, cnt): while cnt > 0: # next vol needed? - if self.cur_avail == 0: + if self._cur_avail == 0: if not self._open_next(): break # fd is in read pos, do the read - if cnt > self.cur_avail: - cnt -= self.cur_avail - self.remain -= self.cur_avail - self.cur_avail = 0 + if cnt > self._cur_avail: + cnt -= self._cur_avail + self._remain -= self._cur_avail + self._cur_avail = 0 else: - self.fd.seek(cnt, 1) - self.cur_avail -= cnt - self.remain -= cnt + self._fd.seek(cnt, 1) + self._cur_avail -= cnt + self._remain -= cnt cnt = 0 def _read(self, cnt): @@ -1621,21 +2261,21 @@ def _read(self, cnt): buf = [] while cnt > 0: # next vol needed? - if self.cur_avail == 0: + if self._cur_avail == 0: if not self._open_next(): break # fd is in read pos, do the read - if cnt > self.cur_avail: - data = self.fd.read(self.cur_avail) + if cnt > self._cur_avail: + data = self._fd.read(self._cur_avail) else: - data = self.fd.read(cnt) + data = self._fd.read(cnt) if not data: break # got some data cnt -= len(data) - self.cur_avail -= len(data) + self._cur_avail -= len(data) buf.append(data) if len(buf) == 1: @@ -1646,31 +2286,34 @@ def _open_next(self): """Proceed to next volume.""" # is the file split over archives? - if (self.cur.flags & RAR_FILE_SPLIT_AFTER) == 0: + if (self._cur.flags & RAR_FILE_SPLIT_AFTER) == 0: return False - if self.fd: - self.fd.close() - self.fd = None + if self._fd: + self._fd.close() + self._fd = None # open next part - self.volfile = self.rf._next_volname(self.volfile) - fd = open(self.volfile, "rb", 0) - self.fd = fd + self._volfile = self._parser._next_volname(self._volfile) + fd = open(self._volfile, "rb", 0) + self._fd = fd + sig = fd.read(len(self._parser._expect_sig)) + if sig != self._parser._expect_sig: + raise BadRarFile("Invalid signature") # loop until first file header while 1: - cur = self.rf._parse_header(fd) + cur = self._parser._parse_header(fd) if not cur: raise BadRarFile("Unexpected EOF") if cur.type in (RAR_BLOCK_MARK, RAR_BLOCK_MAIN): if cur.add_size: fd.seek(cur.add_size, 1) continue - if cur.orig_filename != self.inf.orig_filename: + if cur.orig_filename != self._inf.orig_filename: raise BadRarFile("Did not found file entry") - self.cur = cur - self.cur_avail = cur.add_size + self._cur = cur + self._cur_avail = cur.add_size return True def readinto(self, buf): @@ -1679,23 +2322,22 @@ def readinto(self, buf): vbuf = memoryview(buf) while got < len(buf): # next vol needed? 
- if self.cur_avail == 0: + if self._cur_avail == 0: if not self._open_next(): break # length for next read cnt = len(buf) - got - if cnt > self.cur_avail: - cnt = self.cur_avail + if cnt > self._cur_avail: + cnt = self._cur_avail # read into temp view - res = self.fd.readinto(vbuf[got : got + cnt]) + res = self._fd.readinto(vbuf[got : got + cnt]) if not res: break - if self.crc_check: - self.CRC = crc32(vbuf[got : got + res], self.CRC) - self.cur_avail -= res - self.remain -= res + self._md_context.update(vbuf[got : got + res]) + self._cur_avail -= res + self._remain -= res got += res return got @@ -1708,10 +2350,12 @@ def __init__(self, f, key, iv): self.buf = EMPTY def tell(self): + """Current file pos - works only on block boundaries.""" return self.f.tell() def read(self, cnt=None): - if cnt > 8*1024: + """Read and decrypt.""" + if cnt > 8 * 1024: raise BadRarFile('Bad count to header decrypt - wrong password?') # consume old data @@ -1724,10 +2368,10 @@ def read(self, cnt=None): cnt -= len(res) # decrypt new data - BLK = self.ciph.block_size + blklen = 16 while cnt > 0: - enc = self.f.read(BLK) - if len(enc) < BLK: + enc = self.f.read(blklen) + if len(enc) < blklen: break dec = self.ciph.decrypt(enc) if cnt >= len(dec): @@ -1740,10 +2384,14 @@ def read(self, cnt=None): return res + # handle (filename|filelike) object class XFile(object): + """Input may be filename or file object. + """ __slots__ = ('_fd', '_need_close') - def __init__(self, xfile, bufsize = 1024): + + def __init__(self, xfile, bufsize=1024): if is_filelike(xfile): self._need_close = False self._fd = xfile @@ -1751,27 +2399,279 @@ def __init__(self, xfile, bufsize = 1024): else: self._need_close = True self._fd = open(xfile, 'rb', bufsize) + def read(self, n=None): + """Read from file.""" return self._fd.read(n) + def tell(self): + """Return file pos.""" return self._fd.tell() + def seek(self, ofs, whence=0): + """Move file pos.""" return self._fd.seek(ofs, whence) + def readinto(self, dst): + """Read into buffer.""" return self._fd.readinto(dst) + def close(self): + """Close file object.""" if self._need_close: self._fd.close() + def __enter__(self): return self + def __exit__(self, typ, val, tb): self.close() + +class NoHashContext(object): + """No-op hash function.""" + def __init__(self, data=None): + """Initialize""" + def update(self, data): + """Update data""" + def digest(self): + """Final hash""" + def hexdigest(self): + """Hexadecimal digest.""" + + +class CRC32Context(object): + """Hash context that uses CRC32.""" + __slots__ = ['_crc'] + + def __init__(self, data=None): + self._crc = 0 + if data: + self.update(data) + + def update(self, data): + """Process data.""" + self._crc = rar_crc32(data, self._crc) + + def digest(self): + """Final hash.""" + return self._crc + + def hexdigest(self): + """Hexadecimal digest.""" + return '%08x' % self.digest() + + +class Blake2SP(object): + """Blake2sp hash context. 
+ """ + __slots__ = ['_thread', '_buf', '_cur', '_digest'] + digest_size = 32 + block_size = 64 + parallelism = 8 + + def __init__(self, data=None): + self._buf = b'' + self._cur = 0 + self._digest = None + self._thread = [] + + for i in range(self.parallelism): + ctx = self._blake2s(i, 0, i == (self.parallelism - 1)) + self._thread.append(ctx) + + if data: + self.update(data) + + def _blake2s(self, ofs, depth, is_last): + return blake2s(node_offset=ofs, node_depth=depth, last_node=is_last, + depth=2, inner_size=32, fanout=self.parallelism) + + def _add_block(self, blk): + self._thread[self._cur].update(blk) + self._cur = (self._cur + 1) % self.parallelism + + def update(self, data): + """Hash data. + """ + view = memoryview(data) + bs = self.block_size + if self._buf: + need = bs - len(self._buf) + if len(view) < need: + self._buf += view.tobytes() + return + self._add_block(self._buf + view[:need].tobytes()) + view = view[need:] + while len(view) >= bs: + self._add_block(view[:bs]) + view = view[bs:] + self._buf = view.tobytes() + + def digest(self): + """Return final digest value. + """ + if self._digest is None: + if self._buf: + self._add_block(self._buf) + self._buf = EMPTY + ctx = self._blake2s(0, 1, True) + for t in self._thread: + ctx.update(t.digest()) + self._digest = ctx.digest() + return self._digest + + def hexdigest(self): + """Hexadecimal digest.""" + return tohex(self.digest()) + ## ## Utility functions ## +S_LONG = Struct(' len(buf): + raise BadRarFile('cannot load byte') + return S_BYTE.unpack_from(buf, pos)[0], end + +def load_le32(buf, pos): + """Load little-endian 32-bit integer""" + end = pos + 4 + if end > len(buf): + raise BadRarFile('cannot load le32') + return S_LONG.unpack_from(buf, pos)[0], pos + 4 + +def load_bytes(buf, num, pos): + """Load sequence of bytes""" + end = pos + num + if end > len(buf): + raise BadRarFile('cannot load bytes') + return buf[pos : end], end + +def load_vstr(buf, pos): + """Load bytes prefixed by vint length""" + slen, pos = load_vint(buf, pos) + return load_bytes(buf, slen, pos) + +def load_dostime(buf, pos): + """Load LE32 dos timestamp""" + stamp, pos = load_le32(buf, pos) + tup = parse_dos_time(stamp) + return to_datetime(tup), pos + +def load_unixtime(buf, pos): + """Load LE32 unix timestamp""" + secs, pos = load_le32(buf, pos) + dt = datetime.fromtimestamp(secs, UTC) + return dt, pos + +def load_windowstime(buf, pos): + """Load LE64 windows timestamp""" + # unix epoch (1970) in seconds from windows epoch (1601) + unix_epoch = 11644473600 + val1, pos = load_le32(buf, pos) + val2, pos = load_le32(buf, pos) + secs, n1secs = divmod((val2 << 32) | val1, 10000000) + dt = datetime.fromtimestamp(secs - unix_epoch, UTC) + dt = dt.replace(microsecond=n1secs // 10) + return dt, pos + +# new-style next volume +def _next_newvol(volfile): + i = len(volfile) - 1 + while i >= 0: + if volfile[i] >= '0' and volfile[i] <= '9': + return _inc_volname(volfile, i) + i -= 1 + raise BadRarName("Cannot construct volume name: " + volfile) + +# old-style next volume +def _next_oldvol(volfile): + # rar -> r00 + if volfile[-4:].lower() == '.rar': + return volfile[:-2] + '00' + return _inc_volname(volfile, len(volfile) - 1) + +# increase digits with carry, otherwise just increment char +def _inc_volname(volfile, i): + fn = list(volfile) + while i >= 0: + if fn[i] != '9': + fn[i] = chr(ord(fn[i]) + 1) + break + fn[i] = '0' + i -= 1 + return ''.join(fn) + +# rar3 extended time fields +def _parse_ext_time(h, data, pos): + # flags and rest of data can be missing 
+ flags = 0 + if pos + 2 <= len(data): + flags = S_SHORT.unpack_from(data, pos)[0] + pos += 2 + + mtime, pos = _parse_xtime(flags >> 3 * 4, data, pos, h.mtime) + h.ctime, pos = _parse_xtime(flags >> 2 * 4, data, pos) + h.atime, pos = _parse_xtime(flags >> 1 * 4, data, pos) + h.arctime, pos = _parse_xtime(flags >> 0 * 4, data, pos) + if mtime: + h.mtime = mtime + h.date_time = mtime.timetuple()[:6] + return pos + +# rar3 one extended time field +def _parse_xtime(flag, data, pos, basetime=None): + res = None + if flag & 8: + if not basetime: + basetime, pos = load_dostime(data, pos) + + # load second fractions + rem = 0 + cnt = flag & 3 + for _ in range(cnt): + b, pos = load_byte(data, pos) + rem = (b << 16) | (rem >> 8) + + # convert 100ns units to microseconds + usec = rem // 10 + if usec > 1000000: + usec = 999999 + + # dostime has room for 30 seconds only, correct if needed + if flag & 4 and basetime.second < 59: + res = basetime.replace(microsecond=usec, second=basetime.second + 1) + else: + res = basetime.replace(microsecond=usec) + return res, pos + def is_filelike(obj): + """Filename or file object? + """ if isinstance(obj, str) or isinstance(obj, unicode): return False res = True @@ -1782,14 +2682,16 @@ def is_filelike(obj): return True def rar3_s2k(psw, salt): - """String-to-key hash for RAR3.""" - + """String-to-key hash for RAR3. + """ + if not isinstance(psw, unicode): + psw = psw.decode('utf8') seed = psw.encode('utf-16le') + salt iv = EMPTY h = sha1() for i in range(16): for j in range(0x4000): - cnt = S_LONG.pack(i*0x4000 + j) + cnt = S_LONG.pack(i * 0x4000 + j) h.update(seed + cnt[:3]) if j == 0: iv += h.digest()[19:20] @@ -1797,12 +2699,11 @@ def rar3_s2k(psw, salt): key_le = pack("LLLL", key_be)) return key_le, iv -def rar_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=None): +def rar3_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=None): """Decompress blob of compressed data. Used for data with non-standard header - eg. comments. """ - # already uncompressed? if meth == RAR_M0 and (flags & RAR_FILE_PASSWORD) == 0: return data @@ -1826,11 +2727,11 @@ def rar_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=No # full header hlen = S_BLK_HDR.size + len(fhdr) hdr = S_BLK_HDR.pack(0, RAR_BLOCK_FILE, flags, hlen) + fhdr - hcrc = crc32(hdr[2:]) & 0xFFFF + hcrc = rar_crc32(hdr[2:]) & 0xFFFF hdr = S_BLK_HDR.pack(hcrc, RAR_BLOCK_FILE, flags, hlen) + fhdr # archive main header - mh = S_BLK_HDR.pack(0x90CF, RAR_BLOCK_MAIN, 0, 13) + ZERO * (2+4) + mh = S_BLK_HDR.pack(0x90CF, RAR_BLOCK_MAIN, 0, 13) + ZERO * (2 + 4) # decompress via temp rar tmpfd, tmpname = mkstemp(suffix='.rar') @@ -1850,62 +2751,66 @@ def rar_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=No os.unlink(tmpname) def to_datetime(t): - """Convert 6-part time tuple into datetime object.""" - + """Convert 6-part time tuple into datetime object. 
+ """ if t is None: return None # extract values - year, mon, day, h, m, xs = t - s = int(xs) - us = int(1000000 * (xs - s)) + year, mon, day, h, m, s = t # assume the values are valid try: - return datetime(year, mon, day, h, m, s, us) + return datetime(year, mon, day, h, m, s) except ValueError: pass # sanitize invalid values - MDAY = (0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) - if mon < 1: mon = 1 - if mon > 12: mon = 12 - if day < 1: day = 1 - if day > MDAY[mon]: day = MDAY[mon] - if h > 23: h = 23 - if m > 59: m = 59 - if s > 59: s = 59 + mday = (0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) + if mon < 1: + mon = 1 + if mon > 12: + mon = 12 + if day < 1: + day = 1 + if day > mday[mon]: + day = mday[mon] + if h > 23: + h = 23 + if m > 59: + m = 59 + if s > 59: + s = 59 if mon == 2 and day == 29: try: - return datetime(year, mon, day, h, m, s, us) + return datetime(year, mon, day, h, m, s) except ValueError: day = 28 - return datetime(year, mon, day, h, m, s, us) + return datetime(year, mon, day, h, m, s) def parse_dos_time(stamp): - """Parse standard 32-bit DOS timestamp.""" - - sec = stamp & 0x1F; stamp = stamp >> 5 - min = stamp & 0x3F; stamp = stamp >> 6 - hr = stamp & 0x1F; stamp = stamp >> 5 - day = stamp & 0x1F; stamp = stamp >> 5 - mon = stamp & 0x0F; stamp = stamp >> 4 + """Parse standard 32-bit DOS timestamp. + """ + sec, stamp = stamp & 0x1F, stamp >> 5 + mn, stamp = stamp & 0x3F, stamp >> 6 + hr, stamp = stamp & 0x1F, stamp >> 5 + day, stamp = stamp & 0x1F, stamp >> 5 + mon, stamp = stamp & 0x0F, stamp >> 4 yr = (stamp & 0x7F) + 1980 - return (yr, mon, day, hr, min, sec * 2) + return (yr, mon, day, hr, mn, sec * 2) def custom_popen(cmd): - """Disconnect cmd from parent fds, read only from stdout.""" - + """Disconnect cmd from parent fds, read only from stdout. + """ # needed for py2exe creationflags = 0 if sys.platform == 'win32': - creationflags = 0x08000000 # CREATE_NO_WINDOW + creationflags = 0x08000000 # CREATE_NO_WINDOW # run command try: - p = Popen(cmd, bufsize = 0, - stdout = PIPE, stdin = PIPE, stderr = STDOUT, - creationflags = creationflags) + p = Popen(cmd, bufsize=0, stdout=PIPE, stdin=PIPE, stderr=STDOUT, + creationflags=creationflags) except OSError as ex: if ex.errno == errno.ENOENT: raise RarCannotExec("Unrar not installed? (rarfile.UNRAR_TOOL=%r)" % UNRAR_TOOL) @@ -1913,15 +2818,17 @@ def custom_popen(cmd): return p def custom_check(cmd, ignore_retcode=False): - """Run command, collect output, raise error if needed.""" + """Run command, collect output, raise error if needed. + """ p = custom_popen(cmd) - out, err = p.communicate() + out, _ = p.communicate() if p.returncode and not ignore_retcode: raise RarExecError("Check-run failed") return out -def add_password_arg(cmd, psw, required=False): - """Append password switch to commandline.""" +def add_password_arg(cmd, psw, ___required=False): + """Append password switch to commandline. + """ if UNRAR_TOOL == ALT_TOOL: return if psw is not None: @@ -1930,17 +2837,17 @@ def add_password_arg(cmd, psw, required=False): cmd.append('-p-') def check_returncode(p, out): - """Raise exception according to unrar exit code""" - + """Raise exception according to unrar exit code. 
+ """ code = p.returncode if code == 0: return - # map return code to exception class + # map return code to exception class, codes from rar.txt errmap = [None, - RarWarning, RarFatalError, RarCRCError, RarLockedArchiveError, - RarWriteError, RarOpenError, RarUserError, RarMemoryError, - RarCreateError, RarNoFilesError] # codes from rar.txt + RarWarning, RarFatalError, RarCRCError, RarLockedArchiveError, # 1..4 + RarWriteError, RarOpenError, RarUserError, RarMemoryError, # 5..8 + RarCreateError, RarNoFilesError, RarWrongPassword] # 9..11 if UNRAR_TOOL == ALT_TOOL: errmap = [None] if code > 0 and code < len(errmap): @@ -1960,43 +2867,85 @@ def check_returncode(p, out): raise exc(msg) +def hmac_sha256(key, data): + """HMAC-SHA256""" + return HMAC(key, data, sha256).digest() + def membuf_tempfile(memfile): + """Write in-memory file object to real file.""" memfile.seek(0, 0) tmpfd, tmpname = mkstemp(suffix='.rar') tmpf = os.fdopen(tmpfd, "wb") try: - BSIZE = 32*1024 while True: buf = memfile.read(BSIZE) if not buf: break tmpf.write(buf) tmpf.close() - return tmpname except: tmpf.close() os.unlink(tmpname) raise + return tmpname + +class XTempFile(object): + """Real file for archive. + """ + __slots__ = ('_tmpfile', '_filename') + + def __init__(self, rarfile): + if is_filelike(rarfile): + self._tmpfile = membuf_tempfile(rarfile) + self._filename = self._tmpfile + else: + self._tmpfile = None + self._filename = rarfile + + def __enter__(self): + return self._filename + + def __exit__(self, exc_type, exc_value, tb): + if self._tmpfile: + try: + os.unlink(self._tmpfile) + except OSError: + pass + self._tmpfile = None # # Check if unrar works # -try: - # does UNRAR_TOOL work? - custom_check([UNRAR_TOOL], True) -except RarCannotExec: +ORIG_UNRAR_TOOL = UNRAR_TOOL +ORIG_OPEN_ARGS = OPEN_ARGS +ORIG_EXTRACT_ARGS = EXTRACT_ARGS +ORIG_TEST_ARGS = TEST_ARGS + +def _check_unrar_tool(): + global UNRAR_TOOL, OPEN_ARGS, EXTRACT_ARGS, TEST_ARGS try: - # does ALT_TOOL work? - custom_check([ALT_TOOL] + list(ALT_CHECK_ARGS), True) - # replace config - UNRAR_TOOL = ALT_TOOL - OPEN_ARGS = ALT_OPEN_ARGS - EXTRACT_ARGS = ALT_EXTRACT_ARGS - TEST_ARGS = ALT_TEST_ARGS + # does UNRAR_TOOL work? + custom_check([ORIG_UNRAR_TOOL], True) + + UNRAR_TOOL = ORIG_UNRAR_TOOL + OPEN_ARGS = ORIG_OPEN_ARGS + EXTRACT_ARGS = ORIG_EXTRACT_ARGS + TEST_ARGS = ORIG_TEST_ARGS except RarCannotExec: - # no usable tool, only uncompressed archives work - pass + try: + # does ALT_TOOL work? 
+ custom_check([ALT_TOOL] + list(ALT_CHECK_ARGS), True) + # replace config + UNRAR_TOOL = ALT_TOOL + OPEN_ARGS = ALT_OPEN_ARGS + EXTRACT_ARGS = ALT_EXTRACT_ARGS + TEST_ARGS = ALT_TEST_ARGS + except RarCannotExec: + # no usable tool, only uncompressed archives work + pass + +_check_unrar_tool() diff --git a/lib/traktor/__init__.py b/lib/traktor/__init__.py index c2fa559cee..37d09bc176 100644 --- a/lib/traktor/__init__.py +++ b/lib/traktor/__init__.py @@ -17,9 +17,10 @@ from __future__ import absolute_import -from .exceptions import (TraktException, AuthException, ServerBusy, MissingTokenException, - TraktIOError, TraktConnectionException, TimeoutException, - UnavailableException, ResourceUnavailable, TraktTooManyRedirects, - TokenExpiredException) +from .exceptions import (AuthException, MissingTokenException, ResourceUnavailable, + TokenExpiredException, TraktException, UnavailableException) from .trakt import TraktApi + +__all__ = (AuthException, MissingTokenException, ResourceUnavailable, TokenExpiredException, + TraktApi, TraktException, UnavailableException) diff --git a/lib/traktor/exceptions.py b/lib/traktor/exceptions.py index ade2847bae..a54fffb6b5 100644 --- a/lib/traktor/exceptions.py +++ b/lib/traktor/exceptions.py @@ -1,49 +1,29 @@ -from requests.exceptions import (ConnectionError, Timeout, TooManyRedirects) +"""Traktor exceptions module.""" class TraktException(Exception): - """A Generic Trakt Exception""" + """A Generic Trakt Exception.""" class AuthException(TraktException): - """A Generic Trakt Authentication Exception""" - - -class ServerBusy(TraktException): - """A Generic Trakt Server Busy Exception""" + """A Generic Trakt Authentication Exception.""" class MissingTokenException(TraktException): - """A Generic Trakt Missing Token Exception""" + """A Generic Trakt Missing Token Exception.""" class TokenExpiredException(TraktException): - """A 410 the token has expired Exception""" - - -class TraktIOError(TraktException, IOError): - """A Generic Trakt IOError Exception""" - - -class TraktConnectionException(TraktException, ConnectionError): - """A Generic Trakt Connection Exception""" - - -class TimeoutException(TraktIOError, Timeout): - """A Generic Trakt Timeout Exception""" + """A 410 the token has expired Exception.""" class UnavailableException(TraktException): - """ - A Generic Trakt Unavailable Exception, - possibly raised when Trakt is reachable but is showing an unavailable response code. + """A Generic Trakt Unavailable Exception. + + Possibly raised when Trakt is reachable but is showing an unavailable response code. Possibly raised on in 500 series response codes """ class ResourceUnavailable(TraktException): - """A Trakt Exception for when a requested resources does not exist, possibly raised on 404""" - - -class TraktTooManyRedirects(TraktException, TooManyRedirects): - """A Generic Trakt Too Many Redirects Exception""" + """A Trakt Exception for when a requested resources does not exist, possibly raised on 404.""" diff --git a/lib/traktor/trakt.py b/lib/traktor/trakt.py index 867a9b76c4..99a608eb89 100644 --- a/lib/traktor/trakt.py +++ b/lib/traktor/trakt.py @@ -1,3 +1,4 @@ +"""Traktor Trakt module.""" # coding=utf-8 # # URL: https://medusa.github.io @@ -17,14 +18,16 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . 
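With the requests-specific exception classes removed from traktor.exceptions, callers only need to handle the Trakt-level errors that request() raises. A minimal caller-side sketch, assuming the package is importable as traktor, that a configured TraktApi instance is passed in, and that the sync path used here is just an example:

from traktor import AuthException, TokenExpiredException, TraktException

def fetch_watched(trakt):
    """Fetch watched shows, mapping Trakt failures to None for the caller."""
    try:
        return trakt.request('sync/watched/shows')
    except (AuthException, TokenExpiredException):
        # credentials problem - a fresh PIN / token is needed before retrying
        return None
    except TraktException:
        # also covers UnavailableException, ResourceUnavailable and unknown errors
        return None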
-import time import json import logging -import requests +import time + import certifi -from .exceptions import (TraktConnectionException, MissingTokenException, AuthException, - UnavailableException, ResourceUnavailable, TraktException, - TimeoutException, TraktTooManyRedirects, TokenExpiredException) + +import requests + +from .exceptions import (AuthException, MissingTokenException, ResourceUnavailable, + TokenExpiredException, TraktException, UnavailableException) log = logging.getLogger(__name__) @@ -32,8 +35,10 @@ class TraktApi(object): - """A base class to use for recommended shows client API's""" - def __init__(self, headers=None, timeout=None, api_url=None, auth_url=None, ssl_verify=None, **trakt_settings): # pylint: disable=too-many-arguments + """A base class to use for recommended shows client API's.""" + + def __init__(self, headers=None, timeout=None, api_url=None, auth_url=None, ssl_verify=None, **trakt_settings): + """Initialize TraktApi class.""" headers = { 'Content-Type': 'application/json', 'trakt-api-version': '2', @@ -52,11 +57,10 @@ def __init__(self, headers=None, timeout=None, api_url=None, auth_url=None, ssl_ self.trakt_settings = trakt_settings def get_token(self, refresh_token=None, trakt_pin=None, count=0): - """function or refreshing a trakt token""" - + """Function or refreshing a trakt token.""" if count > 3: self.access_token = '' - return (False, False) + return False, False elif count > 0: time.sleep(2) @@ -85,12 +89,11 @@ def get_token(self, refresh_token=None, trakt_pin=None, count=0): if 'refresh_token' in resp: self.refresh_token = resp['refresh_token'] self.access_token_refreshed = True - return (self.access_token, self.refresh_token) - return (None, None) - - def request(self, path, data=None, headers=None, url=None, method='GET', count=0): # pylint: disable-msg=too-many-arguments,too-many-branches - """function for performing the trakt request""" + return self.access_token, self.refresh_token + return None, None + def request(self, path, data=None, headers=None, url=None, method='GET', count=0): + """Function for performing the trakt request.""" if not self.access_token and count >= 2: raise MissingTokenException(u'You must get a Trakt TOKEN. Check your Trakt settings') @@ -103,7 +106,7 @@ def request(self, path, data=None, headers=None, url=None, method='GET', count=0 if url is None: url = self.api_url - count = count + 1 + count += 1 data = json.dumps(data) if data else [] @@ -116,15 +119,6 @@ def request(self, path, data=None, headers=None, url=None, method='GET', count=0 # convert response to json resp = resp.json() - except TimeoutException: - log.warning(u'Timeout connecting to Trakt. Try to increase timeout value in Trakt settings') - raise TimeoutException(u'Timeout connecting to Trakt. Try to increase timeout value in Trakt settings') - except TraktConnectionException: - log.warning(u'Could not connect to Trakt.') - raise TraktConnectionException(u'Could not connect to Trakt.') - except TraktTooManyRedirects: - log.warning(u'Too many redirections while connection to Trakt.') - raise TraktTooManyRedirects(u'Too many redirections while connection to Trakt.') except requests.RequestException as e: code = getattr(e.response, 'status_code', None) if code == 502: @@ -135,25 +129,25 @@ def request(self, path, data=None, headers=None, url=None, method='GET', count=0 if self.get_token(refresh_token=True, count=count): return self.request(path, data, headers, url, method) else: - log.warning(u'Unauthorized. 
Please check your Trakt settings') - raise AuthException(u'Unauthorized. Please check your Trakt settings') - elif code in (500, 501, 503, 504, 520, 521, 522): + log_message = u'Unauthorized. Please check your Trakt settings' + log.warning(log_message) + raise AuthException(log_message) + elif code in (None, 500, 501, 503, 504, 520, 521, 522): + # Report Trakt as unavailable when Timeout to connect (no status code) # http://docs.trakt.apiary.io/#introduction/status-codes - log.debug(u"Trakt may have some issues and it's unavailable. Try again later please") - raise UnavailableException(u"Trakt may have some issues and it\'s unavailable. Try again later please") + raise UnavailableException(u"Trakt may have some issues and it's unavailable. Try again later please") elif code == 404: - log.error(u'Trakt error (404) the resource does not exist: %s', url + path) - raise ResourceUnavailable(u'Trakt error (404) the resource does not exist: %s', url + path) + log_message = u'Trakt error (404) Not found - the resource does not exist: %s' % url + path + log.error(log_message) + raise ResourceUnavailable(log_message) elif code == 410: - log.error(u'Trakt error (410) Expired - the tokens have expired, restart the process: %s', url + path) - raise TokenExpiredException(u'Trakt error (410) Expired - the tokens have expired, restart the process: %s', url + path) + log_message = u'Trakt error (410) Expired - the tokens have expired. Get a new one' + log.warning(log_message) + raise TokenExpiredException(log_message) else: - if code: - log.error(u'Unknown Trakt request exception. Code error: %s', code) - raise TraktException(u'Unknown Trakt request exception. Code error: %s', code) - else: - log.warning(u'Could not connect to Trakt. No http status code') - raise TraktException(u'Could not connect to Trakt. No http status code') + log_message = u'Unknown Trakt request exception. Error: %s' % code if code else e + log.error(log_message) + raise TraktException(log_message) # check and confirm trakt call did not fail if isinstance(resp, dict) and resp.get('status', False) == 'failure': @@ -169,7 +163,7 @@ def request(self, path, data=None, headers=None, url=None, method='GET', count=0 return resp def validate_account(self): - """function for validation of trakt account""" + """Function for validation of trakt account.""" resp = self.request('users/settings') if 'account' in resp: diff --git a/lib/tvdbapiv2/__init__.py b/lib/tvdbapiv2/__init__.py index bcc513e4c7..84960c8e2d 100644 --- a/lib/tvdbapiv2/__init__.py +++ b/lib/tvdbapiv2/__init__.py @@ -1,3 +1,5 @@ +# coding=utf-8 + from __future__ import absolute_import # import models into sdk package diff --git a/lib/tvdbapiv2/api_client.py b/lib/tvdbapiv2/api_client.py index e3720cbb50..41764ad81e 100644 --- a/lib/tvdbapiv2/api_client.py +++ b/lib/tvdbapiv2/api_client.py @@ -20,34 +20,28 @@ from __future__ import absolute_import -from . import models +from requests.exceptions import RequestException +from . import models # Used through eval. 
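The refactored client below is driven by a requests session plus TVDBAuth, so construction reduces to passing credentials. A rough usage sketch, assuming the library is importable as tvdbapiv2, using a placeholder API key, and assuming the usual swagger-style wiring where API classes accept the shared ApiClient (the exact SearchApi constructor may differ):

from tvdbapiv2.api_client import ApiClient
from tvdbapiv2.apis.search_api import SearchApi

client = ApiClient(api_key='PLACEHOLDER_TVDB_API_KEY')  # builds a requests.Session with TVDBAuth
search = SearchApi(client)                              # assumed wiring of the shared client
results = search.search_series_get(name='Lost', accept_language='en')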
from .rest import RESTClientObject -from .rest import ApiException +from .exceptions import AuthError import os import re import sys -import urllib import json import mimetypes -import random import tempfile import threading +import requests from datetime import datetime from datetime import date # python 2 and python 3 compatibility library -from six import iteritems - -try: - # for python3 - from urllib.parse import quote -except ImportError: - # for python2 - from urllib import quote - +from six import iteritems, text_type +from .auth.tvdb import TVDBAuth from .configuration import Configuration +from .exceptions import ApiException class ApiClient(object): @@ -67,39 +61,23 @@ class ApiClient(object): :param header_name: a header to pass when making calls to the API. :param header_value: a header value to pass when making calls to the API. """ - def __init__(self, host=None, header_name=None, header_value=None, cookie=None): + def __init__(self, host=None, cookie=None, session=None, api_key=None, token=None): """ Constructor of the class. """ - self.rest_client = RESTClientObject() - self.default_headers = {} - if header_name is not None: - self.default_headers[header_name] = header_value + self.session = session or requests.session() + if not api_key and not token: + raise AuthError('Please provide an api_key or token to authenticate.') + + self.session.auth = TVDBAuth(api_key=api_key, token=token) + self.rest_client = RESTClientObject(session=self.session) + if host is None: self.host = Configuration().host else: self.host = host self.cookie = cookie - # Set default User-Agent. - self.user_agent = 'Python-Swagger/1.0.0' - - @property - def user_agent(self): - """ - Gets user agent. - """ - return self.default_headers['User-Agent'] - - @user_agent.setter - def user_agent(self, value): - """ - Sets user agent. - """ - self.default_headers['User-Agent'] = value - - def set_default_header(self, header_name, header_value): - self.default_headers[header_name] = header_value def __call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, @@ -108,7 +86,6 @@ def __call_api(self, resource_path, method, # headers parameters header_params = header_params or {} - header_params.update(self.default_headers) if self.cookie: header_params['Cookie'] = self.cookie if header_params: @@ -118,7 +95,7 @@ def __call_api(self, resource_path, method, if path_params: path_params = self.sanitize_for_serialization(path_params) for k, v in iteritems(path_params): - replacement = quote(str(self.to_path_value(v))) + replacement = text_type(self.to_path_value(v)) resource_path = resource_path.\ replace('{' + k + '}', replacement) @@ -237,9 +214,9 @@ def deserialize(self, response, response_type): # fetch data from response object try: - data = json.loads(response.data) + data = json.loads(response.content) except ValueError: - data = response.data + data = response.content return self.__deserialize(data, response_type) @@ -339,39 +316,39 @@ def request(self, method, url, query_params=None, headers=None, Makes the HTTP request using RESTClient. 
""" if method == "GET": - return self.rest_client.GET(url, + return self.rest_client.get(url, query_params=query_params, headers=headers) elif method == "HEAD": - return self.rest_client.HEAD(url, + return self.rest_client.head(url, query_params=query_params, headers=headers) elif method == "OPTIONS": - return self.rest_client.OPTIONS(url, + return self.rest_client.options(url, query_params=query_params, headers=headers, post_params=post_params, body=body) elif method == "POST": - return self.rest_client.POST(url, + return self.rest_client.post(url, query_params=query_params, headers=headers, post_params=post_params, body=body) elif method == "PUT": - return self.rest_client.PUT(url, + return self.rest_client.put(url, query_params=query_params, headers=headers, post_params=post_params, body=body) elif method == "PATCH": - return self.rest_client.PATCH(url, + return self.rest_client.patch(url, query_params=query_params, headers=headers, post_params=post_params, body=body) elif method == "DELETE": - return self.rest_client.DELETE(url, + return self.rest_client.delete(url, query_params=query_params, headers=headers) else: diff --git a/lib/tvdbapiv2/apis/__init__.py b/lib/tvdbapiv2/apis/__init__.py index bf230f9fa2..1aa07fd0b9 100644 --- a/lib/tvdbapiv2/apis/__init__.py +++ b/lib/tvdbapiv2/apis/__init__.py @@ -1,3 +1,5 @@ +# coding=utf-8 + from __future__ import absolute_import # import apis into api package diff --git a/lib/tvdbapiv2/apis/authentication_api.py b/lib/tvdbapiv2/apis/authentication_api.py index 90d9912f6d..3574850eda 100644 --- a/lib/tvdbapiv2/apis/authentication_api.py +++ b/lib/tvdbapiv2/apis/authentication_api.py @@ -66,8 +66,7 @@ def login_post(self, authentication_string, **kwargs): returns the request thread. """ - all_params = ['authentication_string'] - all_params.append('callback') + all_params = ['authentication_string', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -100,13 +99,13 @@ def login_post(self, authentication_string, **kwargs): body_params = params['authentication_string'] # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -144,8 +143,7 @@ def refresh_token_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -157,7 +155,6 @@ def refresh_token_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/refresh_token'.replace('{format}', 'json') method = 'GET' @@ -173,13 +170,13 @@ def refresh_token_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. 
\ select_header_content_type(['application/json']) # Authentication setting diff --git a/lib/tvdbapiv2/apis/episodes_api.py b/lib/tvdbapiv2/apis/episodes_api.py index 410cee033e..41bad55eb0 100644 --- a/lib/tvdbapiv2/apis/episodes_api.py +++ b/lib/tvdbapiv2/apis/episodes_api.py @@ -66,8 +66,7 @@ def episodes_id_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): diff --git a/lib/tvdbapiv2/apis/languages_api.py b/lib/tvdbapiv2/apis/languages_api.py index 18437570b7..8d5907519b 100644 --- a/lib/tvdbapiv2/apis/languages_api.py +++ b/lib/tvdbapiv2/apis/languages_api.py @@ -65,8 +65,7 @@ def languages_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -78,7 +77,6 @@ def languages_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/languages'.replace('{format}', 'json') method = 'GET' @@ -94,13 +92,13 @@ def languages_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -139,8 +137,7 @@ def languages_id_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id'] - all_params.append('callback') + all_params = ['id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -173,13 +170,13 @@ def languages_id_get(self, id, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting diff --git a/lib/tvdbapiv2/apis/search_api.py b/lib/tvdbapiv2/apis/search_api.py index 00895c355f..a7f61d9bb9 100644 --- a/lib/tvdbapiv2/apis/search_api.py +++ b/lib/tvdbapiv2/apis/search_api.py @@ -69,8 +69,7 @@ def search_series_get(self, **kwargs): returns the request thread. """ - all_params = ['name', 'imdb_id', 'zap2it_id', 'accept_language'] - all_params.append('callback') + all_params = ['name', 'imdb_id', 'zap2it_id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -90,7 +89,6 @@ def search_series_get(self, **kwargs): query_params = {} if 'name' in params: query_params['name'] = params['name'] - print('search called on name: %s' % query_params['name']) if 'imdb_id' in params: query_params['imdbId'] = params['imdb_id'] if 'zap2it_id' in params: @@ -106,13 +104,13 @@ def search_series_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. 
\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -149,8 +147,7 @@ def search_series_params_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -162,7 +159,6 @@ def search_series_params_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/search/series/params'.replace('{format}', 'json') method = 'GET' @@ -178,13 +174,13 @@ def search_series_params_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting diff --git a/lib/tvdbapiv2/apis/series_api.py b/lib/tvdbapiv2/apis/series_api.py index 1c926e0201..50b6dd93e6 100644 --- a/lib/tvdbapiv2/apis/series_api.py +++ b/lib/tvdbapiv2/apis/series_api.py @@ -66,8 +66,7 @@ def series_id_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -147,8 +146,7 @@ def series_id_head(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -227,8 +225,7 @@ def series_id_actors_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id'] - all_params.append('callback') + all_params = ['id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -307,8 +304,7 @@ def series_id_episodes_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'page'] - all_params.append('callback') + all_params = ['id', 'page', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -395,8 +391,7 @@ def series_id_episodes_query_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'absolute_number', 'aired_season', 'aired_episode', 'dvd_season', 'dvd_episode', 'imdb_id', 'page', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'absolute_number', 'aired_season', 'aired_episode', 'dvd_season', 'dvd_episode', 'imdb_id', 'page', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -489,8 +484,7 @@ def series_id_episodes_query_params_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id'] - all_params.append('callback') + all_params = ['id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -567,8 +561,7 @@ def series_id_episodes_summary_get(self, id, **kwargs): returns the request thread. 
""" - all_params = ['id'] - all_params.append('callback') + all_params = ['id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -648,8 +641,7 @@ def series_id_filter_get(self, id, keys, **kwargs): returns the request thread. """ - all_params = ['id', 'keys', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'keys', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -735,8 +727,7 @@ def series_id_filter_params_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -817,8 +808,7 @@ def series_id_images_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -902,8 +892,7 @@ def series_id_images_query_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'key_type', 'resolution', 'sub_key', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'key_type', 'resolution', 'sub_key', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -990,8 +979,7 @@ def series_id_images_query_params_get(self, id, **kwargs): returns the request thread. """ - all_params = ['id', 'accept_language'] - all_params.append('callback') + all_params = ['id', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): diff --git a/lib/tvdbapiv2/apis/updates_api.py b/lib/tvdbapiv2/apis/updates_api.py index fbe89ee923..1673948017 100644 --- a/lib/tvdbapiv2/apis/updates_api.py +++ b/lib/tvdbapiv2/apis/updates_api.py @@ -68,8 +68,7 @@ def updated_query_get(self, from_time, **kwargs): returns the request thread. """ - all_params = ['from_time', 'to_time', 'accept_language'] - all_params.append('callback') + all_params = ['from_time', 'to_time', 'accept_language', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -106,13 +105,13 @@ def updated_query_get(self, from_time, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -150,8 +149,7 @@ def updated_query_params_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -163,7 +161,6 @@ def updated_query_params_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/updated/query/params'.replace('{format}', 'json') method = 'GET' @@ -179,13 +176,13 @@ def updated_query_params_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. 
\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting diff --git a/lib/tvdbapiv2/apis/users_api.py b/lib/tvdbapiv2/apis/users_api.py index bca8bab824..bec5d20a2e 100644 --- a/lib/tvdbapiv2/apis/users_api.py +++ b/lib/tvdbapiv2/apis/users_api.py @@ -19,9 +19,6 @@ from __future__ import absolute_import -import sys -import os - # python 2 and python 3 compatibility library from six import iteritems @@ -65,8 +62,7 @@ def user_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -78,7 +74,6 @@ def user_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/user'.replace('{format}', 'json') method = 'GET' @@ -94,13 +89,13 @@ def user_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -138,8 +133,7 @@ def user_favorites_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -151,7 +145,6 @@ def user_favorites_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/user/favorites'.replace('{format}', 'json') method = 'GET' @@ -167,13 +160,13 @@ def user_favorites_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -212,8 +205,7 @@ def user_favorites_id_put(self, id, **kwargs): returns the request thread. """ - all_params = ['id'] - all_params.append('callback') + all_params = ['id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -246,13 +238,13 @@ def user_favorites_id_put(self, id, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -291,8 +283,7 @@ def user_favorites_id_delete(self, id, **kwargs): returns the request thread. 
""" - all_params = ['id'] - all_params.append('callback') + all_params = ['id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -325,13 +316,13 @@ def user_favorites_id_delete(self, id, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -369,8 +360,7 @@ def user_ratings_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -382,7 +372,6 @@ def user_ratings_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/user/ratings'.replace('{format}', 'json') method = 'GET' @@ -398,13 +387,13 @@ def user_ratings_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -443,8 +432,7 @@ def user_ratings_query_get(self, **kwargs): returns the request thread. """ - all_params = ['item_type'] - all_params.append('callback') + all_params = ['item_type', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -456,7 +444,6 @@ def user_ratings_query_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/user/ratings/query'.replace('{format}', 'json') method = 'GET' @@ -474,13 +461,13 @@ def user_ratings_query_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -518,8 +505,7 @@ def user_ratings_query_params_get(self, **kwargs): returns the request thread. """ - all_params = [] - all_params.append('callback') + all_params = ['callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -531,7 +517,6 @@ def user_ratings_query_params_get(self, **kwargs): params[key] = val del params['kwargs'] - resource_path = '/user/ratings/query/params'.replace('{format}', 'json') method = 'GET' @@ -547,13 +532,13 @@ def user_ratings_query_params_get(self, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. 
\ select_header_content_type(['application/json']) # Authentication setting @@ -593,8 +578,7 @@ def user_ratings_item_type_item_id_delete(self, item_type, item_id, **kwargs): returns the request thread. """ - all_params = ['item_type', 'item_id'] - all_params.append('callback') + all_params = ['item_type', 'item_id', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -632,13 +616,13 @@ def user_ratings_item_type_item_id_delete(self, item_type, item_id, **kwargs): body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting @@ -679,8 +663,7 @@ def user_ratings_item_type_item_id_item_rating_put(self, item_type, item_id, ite returns the request thread. """ - all_params = ['item_type', 'item_id', 'item_rating'] - all_params.append('callback') + all_params = ['item_type', 'item_id', 'item_rating', 'callback'] params = locals() for key, val in iteritems(params['kwargs']): @@ -723,13 +706,13 @@ def user_ratings_item_type_item_id_item_rating_put(self, item_type, item_id, ite body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.\ + header_params['Accept'] = self.api_client. \ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.\ + header_params['Content-Type'] = self.api_client. \ select_header_content_type(['application/json']) # Authentication setting diff --git a/lib/tvdbapiv2/auth/__init__.py b/lib/tvdbapiv2/auth/__init__.py new file mode 100644 index 0000000000..833ef71b03 --- /dev/null +++ b/lib/tvdbapiv2/auth/__init__.py @@ -0,0 +1,5 @@ +# coding=utf-8 + +""" +The sessions.auth package contains custom authentication handlers for Requests. +""" diff --git a/lib/tvdbapiv2/auth/jwt.py b/lib/tvdbapiv2/auth/jwt.py new file mode 100644 index 0000000000..8cab6308bd --- /dev/null +++ b/lib/tvdbapiv2/auth/jwt.py @@ -0,0 +1,111 @@ +# coding=utf-8 + +""" +This module provides JSON Web Token (JWT) Authentication. 
+ +See: + https://jwt.io/introduction/ +""" + +import json +import logging + +from base64 import urlsafe_b64decode +import requests.auth +from six import text_type + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class JWTBearerAuth(requests.auth.AuthBase): + """Attaches JWT Bearer Authentication to a given Request object.""" + + def __init__(self, token=None): + """Create a new request auth with a JWT token.""" + self.token = token + + @property + def auth_header(self): + """A Bearer schema authentication header using the provided token.""" + return { + 'Authorization': 'Bearer {token}'.format(token=self.token) + } + + @property + def token(self): + """The JWT token.""" + return getattr(self, '_token', None) + + @token.setter + def token(self, value): + if self.token != value: + setattr(self, '_token', value) + setattr(self, '_payload', jwt_payload(value)) + + @property + def payload(self): + """The JWT payload.""" + return getattr(self, '_payload', {}) + + def __eq__(self, other): + """Allow comparison of Auth objects.""" + return all([ + self.token == getattr(other, 'token', None), + ]) + + def __ne__(self, other): + """Allow comparison of Auth objects.""" + return not self == other + + def __call__(self, request): + """Apply authentication to the current request.""" + log.debug('Adding JWT Bearer token to request') + request.headers.update(self.auth_header) + return request + + def __repr__(self): + return '{obj.__class__.__name__}({obj.token!r})'.format(obj=self) + + +def jwt_decode(data): + """Decode a JSON Web Token (JWT).""" + # make sure data is binary + if isinstance(data, text_type): + log.debug('Encoding the JWT token as UTF-8') + data = data.encode('utf-8') + + # pad the data to a multiple of 4 bytes + remainder = len(data) % 4 + if remainder > 0: + length = 4 - remainder + log.debug('Padding the JWT with {x} bytes'.format(x=length)) + data += b'=' * length + + # base64 decode the data + data = urlsafe_b64decode(data) + + # convert the decoded json to a string + data = data.decode('utf-8') + + # return the json string as a dict + result = json.loads(data) + log.info('JWT Successfully decoded') + return result + + +def jwt_payload(token): + """Get the payload from a JSON Web Token.""" + # split the token into its header, payload, and signature + result = {} + try: + header, payload, signature = token.split('.') + except AttributeError: + log.debug('Unable to extract payload from JWT: {}'.format(token)) + else: + del header # unused + del signature # unused + result = jwt_decode(payload) + log.debug('Payload extracted from JWT: {}'.format(result)) + finally: + return result diff --git a/lib/tvdbapiv2/auth/tvdb.py b/lib/tvdbapiv2/auth/tvdb.py new file mode 100644 index 0000000000..2c3e1be183 --- /dev/null +++ b/lib/tvdbapiv2/auth/tvdb.py @@ -0,0 +1,146 @@ +# coding=utf-8 + +""" +This module provides authentication to TheTVDB API v2. 
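# --- Illustrative sketch, not part of the patch: exercising the jwt_payload
# --- helper added in lib/tvdbapiv2/auth/jwt.py above. The token built here is a
# --- hand-rolled, hypothetical value; a real token would come from the TVDB /login route.
import json
from base64 import urlsafe_b64encode

claims = {'exp': 1500000000, 'userid': 0}  # hypothetical JWT claims
payload = urlsafe_b64encode(json.dumps(claims).encode('utf-8')).rstrip(b'=').decode('utf-8')
token = '.'.join(['fake-header', payload, 'fake-signature'])
# jwt_payload(token) splits off the middle segment, re-pads it to a multiple of
# 4 bytes, urlsafe-base64-decodes it, and returns the claims dict shown above.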
+""" + +import logging + +from time import time +import requests + +from .jwt import JWTBearerAuth +from ..exceptions import AuthError + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class TVDBAuth(JWTBearerAuth): + """Attaches JWT Bearer Authentication to a TVDB request.""" + + refresh_window = 7200 # seconds + + def __init__(self, api_key=None, token=None): + """Create a new TVDB request auth.""" + super(TVDBAuth, self).__init__(token) + self.api_key = api_key + + @property + def authorization(self): + """TVDB Authentication details for obtaining a JSON Web Token.""" + return { + 'apikey': self.api_key, + } + + @property + def expiration(self): + """Authentication expiration in epoch time.""" + return self.payload.get('exp', time()) + + @property + def time_remaining(self): + """Remaining authentication time in seconds.""" + return max(self.expiration - time(), 0) + + @property + def is_expired(self): + """True if authentication has expired, else False.""" + return self.expiration <= time() + + def _get_token(self, response): + try: + data = response.json() + except ValueError as error: + log.warning('Failed to extract token: {msg}'.format(msg=error)) + else: + self.token = data['token'] + finally: + return response + + def login(self): + """Acquire a JSON Web Token.""" + log.debug('Acquiring a TVDB JWT') + if not self.api_key: + raise AuthError('Missing API key') + response = requests.post( + 'https://api.thetvdb.com/login', + json=self.authorization, + ) + try: + self._get_token(response) + finally: + return response + + def refresh(self): + """Refresh a JSON Web Token.""" + + log.debug('Refreshing a TVDB JWT') + + if not self.token: + log.debug('No token to refresh') + return self.login() + elif self.is_expired: + log.debug('Token has expired') + return self.login() + + response = requests.get( + 'https://api.thetvdb.com/refresh_token', + headers=self.auth_header, + ) + + try: + self._get_token(response) + finally: + return response + + def authenticate(self): + """Acquire or refresh a JSON Web Token.""" + if not self.token or self.is_expired: + self.login() + elif self.time_remaining < self.refresh_window: + self.refresh() + + def __call__(self, request): + self.authenticate() + return super(TVDBAuth, self).__call__(request) + + def __repr__(self): + representation = '{obj.__class__.__name__}(api_key={obj.api_key!r})' + return representation.format(obj=self) + + +class TVDBUser(TVDBAuth): + """ + Attaches a users JWT Bearer Authentication to a TVDB request. + + Providing user authentication to a TVDB session allows access to + user-specific routes. 
+ """ + + def __init__(self, api_key=None, username=None, account_id=None): + """Create a new TVDB request auth with a users credentials.""" + super(TVDBUser, self).__init__(api_key) + self.username = username + self.account_id = account_id + + @property + def authorization(self): + """TVDB Authentication details for obtaining a users JSON Web Token.""" + result = { + 'username': self.username, + 'userkey': self.account_id, + } + result.update(super(TVDBUser, self).authorization) + return result + + def __repr__(self): + representation = ( + '{obj.__class__.__name__}' + '(' + 'api_key={obj.api_key!r}, ' + 'username={obj.username!r}, ' + 'account_id={obj.account_id!r}' + ')' + ) + return representation.format(obj=self) diff --git a/lib/tvdbapiv2/configuration.py b/lib/tvdbapiv2/configuration.py index 0c444d4941..278545ee10 100644 --- a/lib/tvdbapiv2/configuration.py +++ b/lib/tvdbapiv2/configuration.py @@ -20,20 +20,14 @@ from __future__ import absolute_import -import base64 -from requests.packages import urllib3 - -try: - import httplib -except ImportError: - # for python3 - import http.client as httplib - +from requests.packages.urllib3.util import make_headers +from six.moves.http_client import HTTPConnection import sys import logging from six import iteritems + def singleton(cls, *args, **kw): instances = {} @@ -41,6 +35,7 @@ def _singleton(): if cls not in instances: instances[cls] = cls(*args, **kw) return instances[cls] + return _singleton @@ -65,19 +60,18 @@ def __init__(self): # Authentication Settings # dict to store API key(s) - self.api_key = {'identifier': '7FAF7FE037212506'} + self.api_key = {} # dict to store API prefix (e.g. Bearer) self.api_key_prefix = {} # Username for HTTP basic authentication - self.username = "p0psicles" + self.username = "" # Password for HTTP basic authentication - self.password = "Tequila99" - + self.password = "" # Logging Settings - self.logger = {} - self.logger["package_logger"] = logging.getLogger("swagger_client") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger = { + "package_logger": logging.getLogger("swagger_client") + } # Log format self.logger_format = '%(asctime)s %(levelname)s %(message)s' # Log stream handler @@ -158,14 +152,14 @@ def debug(self, value): for _, logger in iteritems(self.logger): logger.setLevel(logging.DEBUG) # turn on httplib debug - httplib.HTTPConnection.debuglevel = 1 + HTTPConnection.debuglevel = 1 else: # if debug status is False, turn off debug logging, # setting log level to default `logging.WARNING` for _, logger in iteritems(self.logger): logger.setLevel(logging.WARNING) # turn off httplib debug - httplib.HTTPConnection.debuglevel = 0 + HTTPConnection.debuglevel = 0 @property def logger_format(self): @@ -205,8 +199,7 @@ def get_basic_auth_token(self): :return: The token for basic HTTP authentication. """ - return urllib3.util.make_headers(basic_auth=self.username + ':' + self.password)\ - .get('authorization') + return make_headers(basic_auth=self.username + ':' + self.password).get('authorization') def auth_settings(self): """ @@ -231,9 +224,9 @@ def to_debug_report(self): :return: The report for debugging. """ - return "Python SDK Debug Report:\n"\ - "OS: {env}\n"\ - "Python Version: {pyversion}\n"\ - "Version of the API: 1.2.0\n"\ - "SDK Package Version: 1.0.0".\ - format(env=sys.platform, pyversion=sys.version) + return "Python SDK Debug Report:\n" \ + "OS: {env}\n" \ + "Python Version: {pyversion}\n" \ + "Version of the API: 1.2.0\n" \ + "SDK Package Version: 1.0.0". 
\ + format(env=sys.platform, pyversion=sys.version) diff --git a/lib/tvdbapiv2/exceptions.py b/lib/tvdbapiv2/exceptions.py new file mode 100644 index 0000000000..f6c4bd6cc7 --- /dev/null +++ b/lib/tvdbapiv2/exceptions.py @@ -0,0 +1,34 @@ +# coding: utf-8 + + +class ApiException(Exception): + + def __init__(self, status=None, reason=None, http_resp=None): + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """ + Custom error messages for exception + """ + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format(self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +class AuthError(ApiException): + """Authentication failed.""" diff --git a/lib/tvdbapiv2/models/__init__.py b/lib/tvdbapiv2/models/__init__.py index 9bc44fdd02..c01dc4e3c7 100644 --- a/lib/tvdbapiv2/models/__init__.py +++ b/lib/tvdbapiv2/models/__init__.py @@ -1,3 +1,5 @@ +# coding=utf-8 + from __future__ import absolute_import # import models into model package diff --git a/lib/tvdbapiv2/rest.py b/lib/tvdbapiv2/rest.py index 17fd2a1e25..5f0bbbe936 100644 --- a/lib/tvdbapiv2/rest.py +++ b/lib/tvdbapiv2/rest.py @@ -20,232 +20,84 @@ """ from __future__ import absolute_import -import sys -import io -import json -import ssl -import certifi import logging - -# python 2 and python 3 compatibility library -from six import iteritems - -from .configuration import Configuration - -try: - from requests.packages import urllib3 -except ImportError: - raise ImportError('Swagger python client requires urllib3.') - -try: - # for python3 - from urllib.parse import urlencode -except ImportError: - # for python2 - from urllib import urlencode - +from contextlib2 import suppress +from requests.exceptions import RequestException +from .exceptions import ApiException logger = logging.getLogger(__name__) -class RESTResponse(io.IOBase): - - def __init__(self, resp): - self.urllib3_response = resp - self.status = resp.status - self.reason = resp.reason - self.data = resp.data - - def getheaders(self): - """ - Returns a dictionary of the response headers. - """ - return self.urllib3_response.getheaders() - - def getheader(self, name, default=None): - """ - Returns a given response header. - """ - return self.urllib3_response.getheader(name, default) - - class RESTClientObject(object): - def __init__(self, pools_size=4): - # urllib3.PoolManager will pass all kw parameters to connectionpool - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 - # ca_certs vs cert_file vs key_file - # http://stackoverflow.com/a/23957365/2985775 + def __init__(self, session): + self.session = session - # cert_reqs - if Configuration().verify_ssl: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - - # ca_certs - if Configuration().ssl_ca_cert: - ca_certs = Configuration().ssl_ca_cert - else: - # if not set certificate file, use Mozilla's root certificates. 
- ca_certs = certifi.where() - - # cert_file - cert_file = Configuration().cert_file - - # key file - key_file = Configuration().key_file - - # https pool manager - self.pool_manager = urllib3.PoolManager( - num_pools=pools_size, - cert_reqs=cert_reqs, - ca_certs=ca_certs, - cert_file=cert_file, - key_file=key_file - ) - - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None): - """ - :param method: http request method - :param url: http request url - :param query_params: query parameters in the url - :param headers: http request headers - :param body: request json body, for `application/json` - :param post_params: request post parameters, - `application/x-www-form-urlencode` - and `multipart/form-data` - """ - method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS'] - - if post_params and body: - raise ValueError( - "body parameter cannot be used with post_params parameter." - ) - - post_params = post_params or {} - headers = headers or {} - - if 'Content-Type' not in headers: - headers['Content-Type'] = 'application/json' + def request(self, method, url, query_params=None, headers=None, body=None, post_params=None): try: # For `POST`, `PUT`, `PATCH`, `OPTIONS` if method in ['POST', 'PUT', 'PATCH', 'OPTIONS']: - if query_params: - url += '?' + urlencode(query_params) if headers['Content-Type'] == 'application/json': - r = self.pool_manager.request(method, url, - body=json.dumps(body), - headers=headers) + r = self.session.request(method, url, params=query_params, headers=headers, json=body) if headers['Content-Type'] == 'application/x-www-form-urlencoded': - r = self.pool_manager.request(method, url, - fields=post_params, - encode_multipart=False, - headers=headers) + r = self.session.request(method, url, params=query_params, headers=headers, json=post_params) if headers['Content-Type'] == 'multipart/form-data': - # must del headers['Content-Type'], or the correct Content-Type - # which generated by urllib3 will be overwritten. - del headers['Content-Type'] - r = self.pool_manager.request(method, url, - fields=post_params, - encode_multipart=True, - headers=headers) + r = self.session.request(method, url, params=query_params, headers=headers) # For `GET`, `HEAD`, `DELETE` else: - r = self.pool_manager.request(method, url, - fields=query_params, - headers=headers) - except urllib3.exceptions.SSLError as e: - msg = "{0}\n{1}".format(type(e).__name__, str(e)) - raise ApiException(status=0, reason=msg) - - r = RESTResponse(r) - - # In the python 3, the response.data is bytes. - # we need to decode it to string. 
- #if sys.version_info > (3,): - r.data = r.data.decode('utf8') - - # log response body - logger.debug("response body: %s" % r.data) + r = self.session.request(method, url, params=query_params, headers=headers) - if r.status not in range(200, 206): - raise ApiException(http_resp=r) + r.raise_for_status() - return r + except RequestException as error: + status = 0 + msg = "{0}\n{1}".format(type(error).__name__, str(error)) + with suppress(AttributeError): + status = error.response.status_code + raise ApiException(status=status, reason=msg) + else: + return r - def GET(self, url, headers=None, query_params=None): + def get(self, url, headers=None, query_params=None): return self.request("GET", url, headers=headers, query_params=query_params) - def HEAD(self, url, headers=None, query_params=None): + def head(self, url, headers=None, query_params=None): return self.request("HEAD", url, headers=headers, query_params=query_params) - def OPTIONS(self, url, headers=None, query_params=None, post_params=None, body=None): + def options(self, url, headers=None, query_params=None, post_params=None, body=None): return self.request("OPTIONS", url, headers=headers, query_params=query_params, post_params=post_params, body=body) - def DELETE(self, url, headers=None, query_params=None): + def delete(self, url, headers=None, query_params=None): return self.request("DELETE", url, headers=headers, query_params=query_params) - def POST(self, url, headers=None, query_params=None, post_params=None, body=None): + def post(self, url, headers=None, query_params=None, post_params=None, body=None): return self.request("POST", url, headers=headers, query_params=query_params, post_params=post_params, body=body) - def PUT(self, url, headers=None, query_params=None, post_params=None, body=None): + def put(self, url, headers=None, query_params=None, post_params=None, body=None): return self.request("PUT", url, headers=headers, query_params=query_params, post_params=post_params, body=body) - def PATCH(self, url, headers=None, query_params=None, post_params=None, body=None): + def patch(self, url, headers=None, query_params=None, post_params=None, body=None): return self.request("PATCH", url, headers=headers, query_params=query_params, post_params=post_params, body=body) - - -class ApiException(Exception): - - def __init__(self, status=None, reason=None, http_resp=None): - if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data - self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None - - def __str__(self): - """ - Custom error messages for exception - """ - error_message = "({0})\n"\ - "Reason: {1}\n".format(self.status, self.reason) - if self.headers: - error_message += "HTTP response headers: {0}\n".format(self.headers) - - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) - - return error_message diff --git a/medusa/__main__.py b/medusa/__main__.py index fa8274f1df..f45c8acd12 100755 --- a/medusa/__main__.py +++ b/medusa/__main__.py @@ -76,6 +76,7 @@ ) from .databases import cache_db, failed_db, main_db from .event_queue import Events +from .indexers.indexer_config import INDEXER_TVDBV2, INDEXER_TVMAZE from .providers.generic_provider import GenericProvider from .providers.nzb.newznab import NewznabProvider from .providers.torrent.rss.rsstorrent import TorrentRssProvider @@ -290,7 +291,7 @@ def start(self, args): if self.console_logging and not 
os.path.isfile(app.CONFIG_FILE): sys.stdout.write('Unable to find %s, all settings will be default!\n' % app.CONFIG_FILE) - app.CFG = ConfigObj(app.CONFIG_FILE) + app.CFG = ConfigObj(app.CONFIG_FILE, encoding='UTF-8', default_encoding='UTF-8') # Initialize the config and our threads self.initialize(console_logging=self.console_logging) @@ -390,8 +391,10 @@ def initialize(self, console_logging=True): app.ENCRYPTION_SECRET = check_setting_str(app.CFG, 'General', 'encryption_secret', helpers.generate_cookie_secret(), censor_log='low') # git login info + app.GIT_AUTH_TYPE = check_setting_int(app.CFG, 'General', 'git_auth_type', 0) app.GIT_USERNAME = check_setting_str(app.CFG, 'General', 'git_username', '') app.GIT_PASSWORD = check_setting_str(app.CFG, 'General', 'git_password', '', censor_log='low') + app.GIT_TOKEN = check_setting_str(app.CFG, 'General', 'git_token', '', censor_log='low') app.DEVELOPER = bool(check_setting_int(app.CFG, 'General', 'developer', 0)) # debugging @@ -644,6 +647,7 @@ def initialize(self, console_logging=True): app.TORRENT_VERIFY_CERT = bool(check_setting_int(app.CFG, 'TORRENT', 'torrent_verify_cert', 0)) app.TORRENT_RPCURL = check_setting_str(app.CFG, 'TORRENT', 'torrent_rpcurl', 'transmission') app.TORRENT_AUTH_TYPE = check_setting_str(app.CFG, 'TORRENT', 'torrent_auth_type', '') + app.TORRENT_SEED_LOCATION = check_setting_str(app.CFG, 'TORRENT', 'torrent_seed_location', '') app.USE_KODI = bool(check_setting_int(app.CFG, 'KODI', 'use_kodi', 0)) app.KODI_ALWAYS_ON = bool(check_setting_int(app.CFG, 'KODI', 'kodi_always_on', 1)) @@ -773,7 +777,10 @@ def initialize(self, console_logging=True): app.TRAKT_USE_RECOMMENDED = bool(check_setting_int(app.CFG, 'Trakt', 'trakt_use_recommended', 0)) app.TRAKT_SYNC = bool(check_setting_int(app.CFG, 'Trakt', 'trakt_sync', 0)) app.TRAKT_SYNC_REMOVE = bool(check_setting_int(app.CFG, 'Trakt', 'trakt_sync_remove', 0)) - app.TRAKT_DEFAULT_INDEXER = check_setting_int(app.CFG, 'Trakt', 'trakt_default_indexer', 1) + app.TRAKT_DEFAULT_INDEXER = check_setting_int(app.CFG, 'Trakt', 'trakt_default_indexer', INDEXER_TVDBV2) + if app.TRAKT_DEFAULT_INDEXER == INDEXER_TVMAZE: + # Trakt doesn't support TVMAZE. 
Default to TVDB + app.TRAKT_DEFAULT_INDEXER = INDEXER_TVDBV2 app.TRAKT_TIMEOUT = check_setting_int(app.CFG, 'Trakt', 'trakt_timeout', 30) app.TRAKT_BLACKLIST_NAME = check_setting_str(app.CFG, 'Trakt', 'trakt_blacklist_name', '') @@ -911,6 +918,9 @@ def initialize(self, console_logging=True): app.RELEASES_IN_PP = [] app.GIT_REMOTE_BRANCHES = [] app.KODI_LIBRARY_CLEAN_PENDING = False + app.SELECTED_ROOT = check_setting_int(app.CFG, 'GUI', 'selected_root', -1) + app.BACKLOG_PERIOD = check_setting_str(app.CFG, 'GUI', 'backlog_period', 'all') + app.BACKLOG_STATUS = check_setting_str(app.CFG, 'GUI', 'backlog_status', 'all') # reconfigure the logger app_logger.reconfigure() @@ -1087,9 +1097,8 @@ def initialize(self, console_logging=True): threadName="BACKLOG", run_delay=update_interval) - search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60} - if app.CHECK_PROPERS_INTERVAL in search_intervals: - update_interval = datetime.timedelta(minutes=search_intervals[app.CHECK_PROPERS_INTERVAL]) + if app.CHECK_PROPERS_INTERVAL in app.PROPERS_SEARCH_INTERVAL: + update_interval = datetime.timedelta(minutes=app.PROPERS_SEARCH_INTERVAL[app.CHECK_PROPERS_INTERVAL]) run_at = None else: update_interval = datetime.timedelta(hours=1) @@ -1324,8 +1333,10 @@ def save_config(): # For passwords you must include the word `password` in the item_name # and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config() new_config['General'] = {} + new_config['General']['git_auth_type'] = app.GIT_AUTH_TYPE new_config['General']['git_username'] = app.GIT_USERNAME new_config['General']['git_password'] = helpers.encrypt(app.GIT_PASSWORD, app.ENCRYPTION_VERSION) + new_config['General']['git_token'] = helpers.encrypt(app.GIT_TOKEN, app.ENCRYPTION_VERSION) new_config['General']['git_reset'] = int(app.GIT_RESET) new_config['General']['git_reset_branches'] = ','.join(app.GIT_RESET_BRANCHES) new_config['General']['branch'] = app.BRANCH @@ -1466,6 +1477,9 @@ def save_config(): new_config['General']['display_all_seasons'] = int(app.DISPLAY_ALL_SEASONS) new_config['General']['news_last_read'] = app.NEWS_LAST_READ new_config['General']['broken_providers'] = helpers.get_broken_providers() or app.BROKEN_PROVIDERS + new_config['General']['selected_root'] = int(app.SELECTED_ROOT) + new_config['General']['backlog_period'] = app.BACKLOG_PERIOD + new_config['General']['backlog_status'] = app.BACKLOG_STATUS new_config['Blackhole'] = {} new_config['Blackhole']['nzb_dir'] = app.NZB_DIR @@ -1556,6 +1570,7 @@ def save_config(): new_config['TORRENT']['torrent_verify_cert'] = int(app.TORRENT_VERIFY_CERT) new_config['TORRENT']['torrent_rpcurl'] = app.TORRENT_RPCURL new_config['TORRENT']['torrent_auth_type'] = app.TORRENT_AUTH_TYPE + new_config['TORRENT']['torrent_seed_location'] = app.TORRENT_SEED_LOCATION new_config['KODI'] = {} new_config['KODI']['use_kodi'] = int(app.USE_KODI) diff --git a/medusa/app.py b/medusa/app.py index d968edec5e..ccfc6e5ef6 100644 --- a/medusa/app.py +++ b/medusa/app.py @@ -105,8 +105,10 @@ GIT_REMOTE = '' GIT_REMOTE_URL = '' CUR_COMMIT_BRANCH = '' +GIT_AUTH_TYPE = 0 GIT_USERNAME = None GIT_PASSWORD = None +GIT_TOKEN = None GIT_PATH = None DEVELOPER = False @@ -314,6 +316,8 @@ TORRENT_VERIFY_CERT = False TORRENT_RPCURL = 'transmission' TORRENT_AUTH_TYPE = 'none' +TORRENT_SEED_LOCATION = None +TORRENT_CHECKER_FREQUENCY = None USE_KODI = False KODI_ALWAYS_ON = True @@ -511,6 +515,9 @@ POSTER_SORTDIR = None FANART_BACKGROUND = None FANART_BACKGROUND_OPACITY = None +SELECTED_ROOT = None 
+BACKLOG_PERIOD = None +BACKLOG_STATUS = None USE_SUBTITLES = False SUBTITLES_LANGUAGES = [] @@ -528,6 +535,7 @@ SUBTITLES_EXTRA_SCRIPTS = [] SUBTITLES_PRE_SCRIPTS = [] SUBTITLES_KEEP_ONLY_WANTED = False +SUBTITLES_ERASE_CACHE = False ADDIC7ED_USER = None ADDIC7ED_PASS = None @@ -592,6 +600,17 @@ RECENTLY_DELETED = set() +RECENTLY_POSTPROCESSED = {} + RELEASES_IN_PP = [] PRIVACY_LEVEL = 'normal' + +PROPERS_SEARCH_INTERVAL = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60} + +PROPERS_INTERVAL_LABELS = {'daily': '24 hours', + '4h': '4 hours', + '90m': '90 mins', + '45m': '45 mins', + '15m': '15 mins' + } diff --git a/medusa/auto_post_processor.py b/medusa/auto_post_processor.py index bc5262732d..683592d9a1 100644 --- a/medusa/auto_post_processor.py +++ b/medusa/auto_post_processor.py @@ -49,11 +49,11 @@ def run(self, force=False): return if not (force or os.path.isabs(app.TV_DOWNLOAD_DIR)): - logger.error(u"Automatic post-processing attempted but directory is relative " - u"(and probably not what you really want to process): {folder}", + logger.error(u'Automatic post-processing attempted but directory is relative ' + u'(and probably not what you really want to process): {folder}', folder=app.TV_DOWNLOAD_DIR) return - process_tv.processDir(app.TV_DOWNLOAD_DIR, force=force) + process_tv.ProcessResult(app.TV_DOWNLOAD_DIR, app.PROCESS_METHOD).process(force=force) finally: self.amActive = False diff --git a/medusa/browser.py b/medusa/browser.py index bd508698b7..6c395665dd 100644 --- a/medusa/browser.py +++ b/medusa/browser.py @@ -1,29 +1,14 @@ # coding=utf-8 -# Author: Nic Wolfe -# - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + """Browser module.""" from __future__ import unicode_literals +import logging import os import string -from . import logger +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) # adapted from http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490 @@ -112,7 +97,7 @@ def list_folders(path, include_parent=False, include_files=False): try: file_list = get_file_list(path, include_files) except OSError as e: - logger.log('Unable to open %s: %s / %s' % (path, repr(e), str(e)), logger.WARNING) + log.warning('Unable to open %s: %s / %s', path, repr(e), str(e)) file_list = get_file_list(parent_path, include_files) file_list = sorted(file_list, diff --git a/medusa/clients/__init__.py b/medusa/clients/__init__.py index 32e136cc28..e69de29bb2 100644 --- a/medusa/clients/__init__.py +++ b/medusa/clients/__init__.py @@ -1,48 +0,0 @@ -# coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . -"""Clients module.""" - -from __future__ import unicode_literals - - -_clients = [ - 'deluge', - 'deluged', - 'download_station', - 'mlnet', - 'qbittorrent', - 'rtorrent', - 'transmission', - 'utorrent', -] - - -def get_client_module(name): - """Import the client module for the given name.""" - return __import__('{prefix}.{name}_client'.format(prefix=__name__, name=name.lower()), fromlist=_clients) - - -def get_client_class(name): - """Return the client API class for the given name. - - :param name: - :type name: string - :return: - :rtype: class - """ - return get_client_module(name).api diff --git a/medusa/clients/nzb/__init__.py b/medusa/clients/nzb/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/medusa/nzbget.py b/medusa/clients/nzb/nzbget.py similarity index 52% rename from medusa/nzbget.py rename to medusa/clients/nzb/nzbget.py index 16f2efdc42..c9a42b1454 100644 --- a/medusa/nzbget.py +++ b/medusa/clients/nzb/nzbget.py @@ -1,60 +1,49 @@ # coding=utf-8 -# Author: Nic Wolfe - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . from __future__ import unicode_literals import datetime +import logging from base64 import standard_b64encode +from medusa import app +from medusa.common import Quality +from medusa.helper.common import try_int +from medusa.logger.adapters.style import BraceAdapter + from six.moves.http_client import socket from six.moves.xmlrpc_client import ProtocolError, ServerProxy -from . import app, logger -from .common import Quality -from .helper.common import try_int + + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) def NZBConnection(url): """Method to connect to NZBget client :param url: nzb url to connect - :return: True if connected, else False """ nzbGetRPC = ServerProxy(url) try: if nzbGetRPC.writelog('INFO', 'Medusa connected to test connection.'): - logger.log('Successful connected to NZBget', logger.DEBUG) + log.debug('Successfully connected to NZBget') else: - logger.log('Successful connected to NZBget, but unable to send a message', logger.WARNING) + log.warning('Successfully connected to NZBget but unable to' + ' send a message') return True except socket.error: - logger.log( - 'Please check your NZBget host and port (if it is running). NZBget is not responding to this combination', - logger.WARNING) + log.warning('Please check your NZBget host and port (if it is' + ' running). 
NZBget is not responding to this combination') return False except ProtocolError as e: if e.errmsg == 'Unauthorized': - logger.log('NZBget username or password is incorrect.', logger.WARNING) + log.warning('NZBget username or password is incorrect.') else: - logger.log('Protocol Error: ' + e.errmsg, logger.ERROR) + log.error('Protocol Error: {msg}', {'msg': e.errmsg}) return False @@ -76,15 +65,16 @@ def testNZB(host, username, password, use_https): return NZBConnection(url) -def sendNZB(nzb, proper=False): # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-return-statements +def sendNZB(nzb, proper=False): """ Sends NZB to NZBGet client :param nzb: nzb object - :param proper: True if this is a Proper download, False if not. Defaults to False + :param proper: True if a Proper download, False if not. """ if app.NZBGET_HOST is None: - logger.log('No NZBget host found in configuration. Please configure it.', logger.WARNING) + log.warning('No NZBget host found in configuration.' + ' Please configure it.') return False addToTop = False @@ -132,17 +122,24 @@ def sendNZB(nzb, proper=False): # pylint: disable=too-many-locals, too-many-sta data = nzb.extraInfo[0] nzbcontent64 = standard_b64encode(data) - logger.log('Sending NZB to NZBget') - logger.log('URL: ' + url, logger.DEBUG) + log.info('Sending NZB to NZBget') + log.debug('URL: {}', url) try: # Find out if nzbget supports priority (Version 9.0+), # old versions beginning with a 0.x will use the old command nzbget_version_str = nzbGetRPC.version() - nzbget_version = try_int(nzbget_version_str[:nzbget_version_str.find('.')]) + nzbget_version = try_int( + nzbget_version_str[:nzbget_version_str.find('.')] + ) if nzbget_version == 0: if nzbcontent64: - nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, addToTop, nzbcontent64) + nzbget_result = nzbGetRPC.append( + nzb.name + '.nzb', + category, + addToTop, + nzbcontent64 + ) else: if nzb.resultType == 'nzb': if not nzb.provider.login(): @@ -154,35 +151,53 @@ def sendNZB(nzb, proper=False): # pylint: disable=too-many-locals, too-many-sta nzbcontent64 = standard_b64encode(data) - nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, addToTop, nzbcontent64) + nzbget_result = nzbGetRPC.append( + nzb.name + '.nzb', + category, + addToTop, + nzbcontent64 + ) elif nzbget_version == 12: if nzbcontent64 is not None: - nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, nzbgetprio, False, - nzbcontent64, False, dupekey, dupescore, 'score') + nzbget_result = nzbGetRPC.append( + nzb.name + '.nzb', category, nzbgetprio, False, + nzbcontent64, False, dupekey, dupescore, 'score' + ) else: - nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb', category, nzbgetprio, False, - nzb.url, False, dupekey, dupescore, 'score') - # v13+ has a new combined append method that accepts both (url and content) - # also the return value has changed from boolean to integer - # (Positive number representing NZBID of the queue item. 0 and negative numbers represent error codes.) + nzbget_result = nzbGetRPC.appendurl( + nzb.name + '.nzb', category, nzbgetprio, False, nzb.url, + False, dupekey, dupescore, 'score' + ) + # v13+ has a new combined append method that accepts both (url and + # content) also the return value has changed from boolean to integer + # (Positive number representing NZBID of the queue item. 0 and negative + # numbers represent error codes.) 
elif nzbget_version >= 13: - nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', nzbcontent64 if nzbcontent64 is not None else nzb.url, - category, nzbgetprio, False, False, dupekey, dupescore, - 'score') > 0 + nzbget_result = nzbGetRPC.append( + nzb.name + '.nzb', + nzbcontent64 if nzbcontent64 is not None else nzb.url, + category, nzbgetprio, False, False, dupekey, dupescore, + 'score' + ) > 0 else: if nzbcontent64 is not None: - nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, nzbgetprio, False, - nzbcontent64) + nzbget_result = nzbGetRPC.append( + nzb.name + '.nzb', category, nzbgetprio, False, + nzbcontent64 + ) else: - nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb', category, nzbgetprio, False, - nzb.url) + nzbget_result = nzbGetRPC.appendurl( + nzb.name + '.nzb', category, nzbgetprio, False, nzb.url + ) if nzbget_result: - logger.log('NZB sent to NZBget successfully', logger.DEBUG) + log.debug('NZB sent to NZBget successfully') return True else: - logger.log('NZBget could not add {} to the queue'.format(nzb.name + '.nzb'), logger.WARNING) + log.warning('NZBget could not add {name}.nzb to the queue', + {'name': nzb.name}) return False except Exception: - logger.log('Connect Error to NZBget: could not add {} to the queue'.format(nzb.name + '.nzb'), logger.WARNING) + log.warning('Connect Error to NZBget: could not add {file}.nzb to the' + ' queue', {'name': nzb.name}) return False diff --git a/medusa/clients/nzb/sab.py b/medusa/clients/nzb/sab.py new file mode 100644 index 0000000000..14d19d9a63 --- /dev/null +++ b/medusa/clients/nzb/sab.py @@ -0,0 +1,141 @@ +# coding=utf-8 + +""" +NZB Client API for SABnzbd. + +https://sabnzbd.org/ +https://github.com/sabnzbd/sabnzbd +""" + +from __future__ import unicode_literals + +import datetime +import logging + +from medusa import app +from medusa.logger.adapters.style import BraceAdapter + +import requests +from requests.compat import urljoin + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + +session = requests.Session() + + +def send_nzb(nzb): + """ + Sends an NZB to SABnzbd via the API. 
+ + :param nzb: The NZBSearchResult object to send to SAB + """ + session.params.update({ + 'output': 'json', + 'ma_username': app.SAB_USERNAME, + 'ma_password': app.SAB_PASSWORD, + 'apikey': app.SAB_APIKEY, + }) + + category = app.SAB_CATEGORY + if nzb.show.is_anime: + category = app.SAB_CATEGORY_ANIME + + # if it aired more than 7 days ago, override with the backlog category IDs + for cur_ep in nzb.episodes: + if datetime.date.today() - cur_ep.airdate > datetime.timedelta(days=7): + category = app.SAB_CATEGORY_ANIME_BACKLOG if nzb.show.is_anime else app.SAB_CATEGORY_BACKLOG + + # set up a dict with the URL params in it + params = { + 'cat': category, + 'mode': 'addurl', + 'name': nzb.url, + } + + if nzb.priority: + params['priority'] = 2 if app.SAB_FORCED else 1 + + log.info('Sending NZB to SABnzbd') + url = urljoin(app.SAB_HOST, 'api') + + response = session.get(url, params=params, verify=False) + + try: + data = response.json() + except ValueError: + log.info('Error connecting to sab, no data returned') + else: + log.debug('Result text from SAB: {0}', data) + result, text = _check_sab_response(data) + del text + return result + + +def _check_sab_response(jdata): + """ + Check response from SAB + + :param jdata: Response from requests api call + :return: a list of (Boolean, string) which is True if SAB is not reporting an error + """ + error = jdata.get('error') + + if error == 'API Key Incorrect': + log.warning("Sabnzbd's API key is incorrect") + elif error: + log.error('Sabnzbd encountered an error: {0}', error) + + return not error, error or jdata + + +def get_sab_access_method(host=None): + """ + Find out how we should connect to SAB + + :param host: hostname where SAB lives + :return: (boolean, string) with True if method was successful + """ + session.params.update({ + 'output': 'json', + 'ma_username': app.SAB_USERNAME, + 'ma_password': app.SAB_PASSWORD, + 'apikey': app.SAB_APIKEY, + }) + url = urljoin(host, 'api') + response = session.get(url, params={'mode': 'auth'}, verify=False) + + try: + data = response.json() + except ValueError: + return False, response + else: + return _check_sab_response(data) + + +def test_authentication(host=None, username=None, password=None, apikey=None): + """ + Sends a simple API request to SAB to determine if the given connection information is connect + + :param host: The host where SAB is running (incl port) + :param username: The username to use for the HTTP request + :param password: The password to use for the HTTP request + :param apikey: The API key to provide to SAB + :return: A tuple containing the success boolean and a message + """ + session.params.update({ + 'ma_username': username, + 'ma_password': password, + 'apikey': apikey, + }) + url = urljoin(host, 'api') + + response = session.get(url, params={'mode': 'queue'}, verify=False) + try: + data = response.json() + except ValueError: + return False, response + else: + # check the result and determine if it's good or not + result, sab_text = _check_sab_response(data) + return result, 'success' if result else sab_text diff --git a/medusa/helper/quality.py b/medusa/clients/torrent/__init__.py similarity index 51% rename from medusa/helper/quality.py rename to medusa/clients/torrent/__init__.py index ea5a0b387b..32e136cc28 100644 --- a/medusa/helper/quality.py +++ b/medusa/clients/torrent/__init__.py @@ -1,7 +1,7 @@ # coding=utf-8 -# This file is part of Medusa. +# Author: Nic Wolfe # - +# This file is part of Medusa. 
# # Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -15,20 +15,34 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . +"""Clients module.""" -from ..common import Quality, qualityPresetStrings +from __future__ import unicode_literals -def get_quality_string(quality): - """ - :param quality: The quality to convert into a string - :return: The string representation of the provided quality - """ +_clients = [ + 'deluge', + 'deluged', + 'download_station', + 'mlnet', + 'qbittorrent', + 'rtorrent', + 'transmission', + 'utorrent', +] + - if quality in qualityPresetStrings: - return qualityPresetStrings[quality] +def get_client_module(name): + """Import the client module for the given name.""" + return __import__('{prefix}.{name}_client'.format(prefix=__name__, name=name.lower()), fromlist=_clients) - if quality in Quality.qualityStrings: - return Quality.qualityStrings[quality] - return 'Custom' +def get_client_class(name): + """Return the client API class for the given name. + + :param name: + :type name: string + :return: + :rtype: class + """ + return get_client_module(name).api diff --git a/medusa/clients/deluge_client.py b/medusa/clients/torrent/deluge_client.py similarity index 53% rename from medusa/clients/deluge_client.py rename to medusa/clients/torrent/deluge_client.py index 7e872908b2..5a9fe53553 100644 --- a/medusa/clients/deluge_client.py +++ b/medusa/clients/torrent/deluge_client.py @@ -1,20 +1,5 @@ # coding=utf-8 -# Author: Mr_Orange -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + """Deluge Web Client.""" from __future__ import unicode_literals @@ -23,11 +8,95 @@ import logging from base64 import b64encode +from medusa import app + +from medusa.clients.torrent.generic import GenericClient + +from medusa.helpers import ( + is_already_processed_media, + is_info_hash_in_history, + is_info_hash_processed, +) + +from medusa.logger.adapters.style import BraceAdapter + from requests.exceptions import RequestException -from .generic import GenericClient -from .. import app -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + +def read_torrent_status(torrent_data): + """Read torrent status from Deluge and Deluged client.""" + found_torrents = False + info_hash_to_remove = [] + for torrent in torrent_data.items(): + info_hash = str(torrent[0]) + details = torrent[1] + if not is_info_hash_in_history(info_hash): + continue + found_torrents = True + + to_remove = False + for i in details['files']: + # Check if media was processed + # OR check hash in case of RARed torrents + if is_already_processed_media(i['path']) or is_info_hash_processed(info_hash): + to_remove = True + + # Don't need to check status if we are not going to remove it. 
+ + if not to_remove: + log.info('Torrent not yet post-processed. Skipping: {torrent}', + {'torrent': details['name']}) + continue + + status = 'busy' + if details['is_finished']: + status = 'completed' + elif details['is_seed']: + status = 'seeding' + elif details['paused']: + status = 'paused' + else: + status = details['state'] + + if status == 'completed': + log.info( + 'Torrent completed and reached minimum' + ' ratio: [{ratio:.3f}/{ratio_limit:.3f}] or' + ' seed idle limit.' + ' Removing it: [{name}]', + ratio=details['ratio'], + ratio_limit=details['stop_ratio'], + name=details['name'] + ) + info_hash_to_remove.append(info_hash) + elif status == 'seeding': + if float(details['ratio']) < float(details['stop_ratio']): + log.info( + 'Torrent did not reach minimum' + ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].' + ' Keeping it: [{name}]', + ratio=details['ratio'], + ratio_limit=details['stop_ratio'], + name=details['name'] + ) + else: + log.info( + 'Torrent completed and reached minimum ratio but it' + ' was force started again. Current' + ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].' + ' Keeping it: [{name}]', + ratio=details['ratio'], + ratio_limit=details['stop_ratio'], + name=details['name'] + ) + else: + log.info('Torrent is {status}. Keeping it: [{name}]', status=status, name=details['name']) + + if not found_torrents: + log.info('No torrents found that were snatched by Medusa') + return info_hash_to_remove class DelugeAPI(GenericClient): @@ -71,8 +140,11 @@ def _get_auth(self): }) try: - self.response = self.session.post(self.url, data=post_data.encode('utf-8'), - verify=app.TORRENT_VERIFY_CERT) + self.response = self.session.post( + self.url, + data=post_data.encode('utf-8'), + verify=app.TORRENT_VERIFY_CERT + ) except RequestException: return None @@ -85,14 +157,18 @@ def _get_auth(self): 'id': 11, }) try: - self.response = self.session.post(self.url, data=post_data.encode('utf-8'), - verify=app.TORRENT_VERIFY_CERT) + self.response = self.session.post( + self.url, + data=post_data.encode('utf-8'), + verify=app.TORRENT_VERIFY_CERT + ) except RequestException: return None hosts = self.response.json()['result'] if not hosts: - logger.error('{name}: WebUI does not contain daemons', name=self.name) + log.error('{name}: WebUI does not contain daemons', + {'name': self.name}) return None post_data = json.dumps({ @@ -104,8 +180,11 @@ def _get_auth(self): }) try: - self.response = self.session.post(self.url, data=post_data.encode('utf-8'), - verify=app.TORRENT_VERIFY_CERT) + self.response = self.session.post( + self.url, + data=post_data.encode('utf-8'), + verify=app.TORRENT_VERIFY_CERT + ) except RequestException: return None @@ -116,14 +195,18 @@ def _get_auth(self): }) try: - self.response = self.session.post(self.url, data=post_data.encode('utf-8'), - verify=app.TORRENT_VERIFY_CERT) + self.response = self.session.post( + self.url, + data=post_data.encode('utf-8'), + verify=app.TORRENT_VERIFY_CERT + ) except RequestException: return None connected = self.response.json()['result'] if not connected: - logger.error('{name}: WebUI could not connect to daemon', name=self.name) + log.error('{name}: WebUI could not connect to daemon', + {'name': self.name}) return None return self.auth @@ -163,6 +246,28 @@ def _add_torrent_file(self, result): return self.response.json()['result'] + def move_torrent(self, info_hash): + """Set new torrent location given info_hash.
+ + :param info_hash: + :type info_hash: string + :return + :rtype: bool + """ + if not app.TORRENT_SEED_LOCATION or not info_hash: + return + + post_data = json.dumps({ + 'method': 'core.move_storage', + 'params': [ + [info_hash], + app.TORRENT_SEED_LOCATION, + ], + 'id': 72, + }) + + return not self.response.json()['error'] if self._request(method='post', data=post_data) else False + def remove_torrent(self, info_hash): """Remove torrent from client using given info_hash. @@ -189,7 +294,9 @@ def _set_torrent_label(self, result): if result.show.is_anime: label = app.TORRENT_LABEL_ANIME.lower() if ' ' in label: - logger.error('{name}: Invalid label. Label must not contain a space', name=self.name) + log.error('{name}: Invalid label. Label must not contain a space', + {'name': self.name}) + return False if label: @@ -205,8 +312,9 @@ def _set_torrent_label(self, result): if labels is not None: if label not in labels: - logger.debug('{name}: {label} label does not exist in Deluge we must add it', name=self.name, - label=label) + log.debug('{name}: {label} label does not exist in Deluge' + ' we must add it', + {'name': self.name, 'label': label}) post_data = json.dumps({ 'method': 'label.add', 'params': [ @@ -216,7 +324,8 @@ def _set_torrent_label(self, result): }) self._request(method='post', data=post_data) - logger.debug('{name}: {label} label added to Deluge', name=self.name, label=label) + log.debug('{name}: {label} label added to Deluge', + {'name': self.name, 'label': label}) # add label to torrent post_data = json.dumps({ @@ -229,9 +338,12 @@ def _set_torrent_label(self, result): }) self._request(method='post', data=post_data) - logger.debug('{name}: {label} label added to torrent', name=self.name, label=label) + log.debug('{name}: {label} label added to torrent', + {'name': self.name, 'label': label}) + else: - logger.debug('{name}: label plugin not detected', name=self.name) + log.debug('{name}: label plugin not detected', + {'name': self.name}) return False return not self.response.json()['error'] @@ -255,7 +367,8 @@ def _set_torrent_ratio(self, result): self._request(method='post', data=post_data) - # Return false if we couldn't enable setting set_torrent_stop_at_ratio. No reason to set ratio. + # if unable to set_torrent_stop_at_ratio return False + # No reason to set ratio. if self.response.json()['error']: return False @@ -330,5 +443,35 @@ def _set_torrent_pause(self, result): return True + def remove_ratio_reached(self): + """Remove all Medusa torrents that ratio was reached. 
+ + It loops in all hashes returned from client and check if it is in the snatch history + if its then it checks if we already processed media from the torrent (episode status `Downloaded`) + If is a RARed torrent then we don't have a media file so we check if that hash is from an + episode that has a `Downloaded` status + """ + post_data = json.dumps({ + 'method': 'core.get_torrents_status', + 'params': [ + {}, + ['name', 'hash', 'progress', 'state', 'ratio', 'stop_ratio', + 'is_seed', 'is_finished', 'paused', 'files'], + ], + 'id': 72, + }) + + log.info('Checking Deluge torrent status.') + if self._request(method='post', data=post_data): + if self.response.json()['error']: + log.info('Error while fetching torrents status') + return + else: + torrent_data = self.response.json()['result'] + self.read_torrent_status(torrent_data) + # Commented for now + # for info_hash in to_remove: + # self.remove_torrent(info_hash) + api = DelugeAPI diff --git a/medusa/clients/deluged_client.py b/medusa/clients/torrent/deluged_client.py similarity index 71% rename from medusa/clients/deluged_client.py rename to medusa/clients/torrent/deluged_client.py index 2511a3c2af..bfb12aedf3 100644 --- a/medusa/clients/deluged_client.py +++ b/medusa/clients/torrent/deluged_client.py @@ -1,22 +1,27 @@ # coding=utf-8 -# Author: Paul Wollaston -# Contributions: Luke Mullan -# -# This client script allows connection to Deluge Daemon directly, completely -# circumventing the requirement to use the WebUI. -"""Deluge Daemon Client.""" + +""" +Deluge Daemon Client. + +This client script allows connection to Deluge Daemon directly, completely +circumventing the requirement to use the WebUI. +""" from __future__ import unicode_literals import logging from base64 import b64encode +from medusa import app +from medusa.clients.torrent.deluge_client import read_torrent_status +from medusa.clients.torrent.generic import GenericClient +from medusa.logger.adapters.style import BraceAdapter + from synchronousdeluge import DelugeClient -from .generic import GenericClient -from .. import app -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class DelugeDAPI(GenericClient): @@ -91,7 +96,19 @@ def remove_torrent(self, info_hash): :return :rtype: bool """ - return self.drpc.remove_torrent_ratio(info_hash, True) + return self.drpc.remove_torrent_data(info_hash) + + def move_torrent(self, info_hash): + """Set new torrent location given info_hash. + + :param info_hash: + :type info_hash: string + :return + :rtype: bool + """ + if not app.TORRENT_SEED_LOCATION or not info_hash: + return + return self.drpc.move_storage(info_hash, app.TORRENT_SEED_LOCATION) def _set_torrent_label(self, result): @@ -99,7 +116,8 @@ def _set_torrent_label(self, result): if result.show.is_anime: label = app.TORRENT_LABEL_ANIME.lower() if ' ' in label: - logger.error('{name}: Invalid label. Label must not contain a space', name=self.name) + log.error('{name}: Invalid label. Label must not contain a space', + {'name': self.name}) return False return self.drpc.set_torrent_label(result.hash, label) if label else True @@ -128,6 +146,25 @@ def test_authentication(self): else: return False, 'Error: Unable to Authenticate! Please check your config!' + def remove_ratio_reached(self): + """Remove all Medusa torrents that ratio was reached. 
+ + It loops in all hashes returned from client and check if it is in the snatch history + if its then it checks if we already processed media from the torrent (episode status `Downloaded`) + If is a RARed torrent then we don't have a media file so we check if that hash is from an + episode that has a `Downloaded` status + """ + log.info('Checking DelugeD torrent status.') + + if not self.connect(): + return + + torrent_data = self.drpc.get_all_torrents() + read_torrent_status(torrent_data) + # Commented for now + # for info_hash in to_remove: + # self.remove_torrent(info_hash) + class DelugeRPC(object): """Deluge RPC client class.""" @@ -174,6 +211,46 @@ def test(self): else: return True + def remove_torrent_data(self, torrent_id): + """Remove torrent from client using given info_hash. + + :param torrent_id: + :type torrent_id: str + :return: + :rtype: str or bool + """ + try: + self.connect() + self.client.core.remove_torrent(torrent_id, True).get() + except Exception: + return False + else: + return True + finally: + if self.client: + self.disconnect() + + def move_storage(self, torrent_id, location): + """Move torrent to new location and return torrent id/hash. + + :param torrent_id: + :type torrent_id: str + :param location: + :type location: str + :return: + :rtype: str or bool + """ + try: + self.connect() + self.client.core.move_storage(torrent_id, location).get() + except Exception: + return False + else: + return True + finally: + if self.client: + self.disconnect() + def add_torrent_magnet(self, torrent, options, info_hash): """Add Torrent magnet and return torrent id/hash. @@ -339,8 +416,28 @@ def disconnect(self): def _check_torrent(self, info_hash): torrent_id = self.client.core.get_torrent_status(info_hash, {}).get() if torrent_id['hash']: - logger.debug('DelugeD: Torrent already exists in Deluge') + log.debug('DelugeD: Torrent already exists in Deluge') return info_hash return False + def get_all_torrents(self): + """Get all torrents in client. + + :return: + :rtype: bool + """ + try: + self.connect() + torrents_data = self.client.core.get_torrents_status({}, ('name', 'hash', 'progress', 'state', + 'ratio', 'stop_ratio', 'is_seed', 'is_finished', + 'paused', 'files')).get() + except Exception: + return False + else: + return torrents_data + finally: + if self.client: + self.disconnect() + + api = DelugeDAPI diff --git a/medusa/clients/download_station_client.py b/medusa/clients/torrent/download_station_client.py similarity index 81% rename from medusa/clients/download_station_client.py rename to medusa/clients/torrent/download_station_client.py index 2280766c6d..941ad24eee 100644 --- a/medusa/clients/download_station_client.py +++ b/medusa/clients/torrent/download_station_client.py @@ -1,26 +1,11 @@ # coding=utf-8 -# Authors: -# Pedro Jose Pereira Vieito (Twitter: @pvieito) -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . 
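remove_torrent_data(), move_storage() and get_all_torrents() in DelugeRPC above all repeat the same connect / call / disconnect pattern. Purely as an illustration of that pattern, a hypothetical helper that could wrap any single synchronousdeluge core call (the helper itself does not exist in the codebase):

    def _call_core(rpc, method_name, *args):
        """Run one synchronousdeluge core call, always disconnecting afterwards.

        ``rpc`` is a DelugeRPC instance; returns the call result, or False when
        the daemon is unreachable or the call raises, mirroring the error
        handling used by remove_torrent_data() and move_storage() above.
        """
        try:
            rpc.connect()
            return getattr(rpc.client.core, method_name)(*args).get()
        except Exception:
            return False
        finally:
            if rpc.client:
                rpc.disconnect()

    # e.g. _call_core(drpc, 'move_storage', torrent_id, location)
    #      _call_core(drpc, 'remove_torrent', torrent_id, True)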
-# - -# Uses the Synology Download Station API: -# http://download.synology.com/download/Document/DeveloperGuide/Synology_Download_Station_Web_API.pdf -"""Synology Download Station Client.""" + +""" +Synology Download Station Client. + +Uses the Synology Download Station API: +http://download.synology.com/download/Document/DeveloperGuide/Synology_Download_Station_Web_API.pdf +""" from __future__ import unicode_literals @@ -28,13 +13,16 @@ import os import re +from medusa import app +from medusa.clients.torrent.generic import GenericClient +from medusa.helpers import handle_requests_exception +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urljoin from requests.exceptions import RequestException -from .generic import GenericClient -from .. import app -from ..helpers import handle_requests_exception -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class DownloadStationAPI(GenericClient): @@ -86,7 +74,7 @@ def _check_response(self): self.auth = jdata.get('success') if not self.auth: error_code = jdata.get('error', {}).get('code') - logger.info(self.error_map.get(error_code, jdata)) + log.info(self.error_map.get(error_code, jdata)) self.session.cookies.clear() return self.auth @@ -189,7 +177,8 @@ def _check_destination(self): jdata = self.response.json() version_string = jdata.get('data', {}).get('version_string') if not version_string: - logger.warning('Could not get the version string from DSM: {response}', response=jdata) + log.warning('Could not get the version string from DSM:' + ' {response}', {'response': jdata}) return False if version_string.startswith('DSM 6'): @@ -220,12 +209,15 @@ def _check_destination(self): if destination and os.path.isabs(destination): torrent_path = re.sub(r'^/volume\d/', '', destination).lstrip('/') else: - logger.info('Default destination could not be determined for DSM6: {response}', - response=jdata) + log.info('Default destination could not be' + ' determined for DSM6: {response}', + {'response': jdata}) + return False if destination or torrent_path: - logger.info('Destination is now {path}', path=torrent_path or destination) + log.info('Destination is now {path}', + {'path': torrent_path or destination}) self.checked_destination = True self.destination = torrent_path diff --git a/medusa/clients/generic.py b/medusa/clients/torrent/generic.py similarity index 71% rename from medusa/clients/generic.py rename to medusa/clients/torrent/generic.py index e75a5e6fb2..138502f1a9 100644 --- a/medusa/clients/generic.py +++ b/medusa/clients/torrent/generic.py @@ -3,18 +3,27 @@ from __future__ import unicode_literals +import logging import re import time import traceback from base64 import b16encode, b32decode + from hashlib import sha1 from bencode import bdecode, bencode from bencode.BTL import BTFailure + +from medusa import app, db, helpers +from medusa.helper.common import http_code_description +from medusa.logger.adapters.style import BraceAdapter + import requests from six.moves.http_cookiejar import CookieJar -from .. 
import app, db, helpers, logger -from ..helper.common import http_code_description + + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class GenericClient(object): @@ -51,50 +60,62 @@ def _request(self, method='get', params=None, data=None, files=None, cookies=Non self.last_time = time.time() self._get_auth() - data_str = str(data) - logger.log('{name}: Requested a {method} connection to {url} with Params: {params} Data: {data}{etc}'.format - (name=self.name, method=method.upper(), url=self.url, - params=params, data=data_str[0:99], - etc='...' if len(data_str) > 99 else ''), logger.DEBUG) + text = str(data) + log.debug( + '{name}: Requested a {method} connection to {url} with' + ' params: {params} Data: {data}', { + 'name': self.name, + 'method': method.upper(), + 'url': self.url, + 'params': params, + 'data': text[0:99] + '...' if len(text) > 102 else text + } + ) if not self.auth: - logger.log('{name}: Authentication Failed'.format(name=self.name), logger.WARNING) + log.warning('{name}: Authentication Failed', {'name': self.name}) return False try: self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, cookies=cookies, timeout=120, verify=False) except requests.exceptions.ConnectionError as msg: - logger.log('{name}: Unable to connect {error}'.format - (name=self.name, error=msg), logger.ERROR) + log.error('{name}: Unable to connect {error}', + {'name': self.name, 'error': msg}) return False except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL): - logger.log('{name}: Invalid Host'.format(name=self.name), logger.ERROR) + log.error('{name}: Invalid Host', {'name': self.name}) return False except requests.exceptions.HTTPError as msg: - logger.log('{name}: Invalid HTTP Request {error}'.format(name=self.name, error=msg), logger.ERROR) + log.error('{name}: Invalid HTTP Request {error}', + {'name': self.name, 'error': msg}) return False except requests.exceptions.Timeout as msg: - logger.log('{name}: Connection Timeout {error}'.format(name=self.name, error=msg), logger.WARNING) + log.warning('{name}: Connection Timeout {error}', + {'name': self.name, 'error': msg}) return False except Exception as msg: - logger.log('{name}: Unknown exception raised when send torrent to {name} : {error}'.format - (name=self.name, error=msg), logger.ERROR) + log.error('{name}: Unknown exception raised when send torrent to' + ' {name} : {error}', {'name': self.name, 'error': msg}) return False if self.response.status_code == 401: - logger.log('{name}: Invalid Username or Password, check your config'.format - (name=self.name), logger.ERROR) + log.error('{name}: Invalid Username or Password,' + ' check your config', {'name': self.name}) return False code_description = http_code_description(self.response.status_code) if code_description is not None: - logger.log('{name}: {code}'.format(name=self.name, code=code_description), logger.INFO) + log.info('{name}: {code}', + {'name': self.name, 'code': code_description}) return False - logger.log('{name}: Response to {method} request is {response}'.format - (name=self.name, method=method.upper(), response=self.response.text), logger.DEBUG) + log.debug('{name}: Response to {method} request is {response}', { + 'name': self.name, + 'method': method.upper(), + 'response': self.response.text + }) return True @@ -192,8 +213,10 @@ def _get_info_hash(result): info = torrent_bdecode['info'] result.hash = sha1(bencode(info)).hexdigest() except (BTFailure, KeyError): - 
logger.log('Unable to bdecode torrent. Invalid torrent: {0}. Deleting cached result if exists'.format - (result.name), logger.WARNING) + log.warning( + 'Unable to bdecode torrent. Invalid torrent: {name}. ' + 'Deleting cached result if exists', {'name': result.name} + ) cache_db_con = db.DBConnection('cache.db') cache_db_con.action( b'DELETE FROM [{provider}] ' @@ -201,7 +224,7 @@ def _get_info_hash(result): [result.name] ) except Exception: - logger.log(traceback.format_exc(), logger.ERROR) + log.error(traceback.format_exc()) return result @@ -215,11 +238,11 @@ def send_torrent(self, result): """ r_code = False - logger.log('Calling {name} Client'.format(name=self.name), logger.DEBUG) + log.debug('Calling {name} Client', {'name': self.name}) if not self.auth: if not self._get_auth(): - logger.log('{name}: Authentication Failed'.format(name=self.name), logger.WARNING) + log.warning('{name}: Authentication Failed', {'name': self.name}) return r_code try: @@ -238,31 +261,40 @@ def send_torrent(self, result): r_code = self._add_torrent_file(result) if not r_code: - logger.log('{name}: Unable to send Torrent'.format(name=self.name), logger.WARNING) + log.warning('{name}: Unable to send Torrent', + {'name': self.name}) return False if not self._set_torrent_pause(result): - logger.log('{name}: Unable to set the pause for Torrent'.format(name=self.name), logger.ERROR) + log.error('{name}: Unable to set the pause for Torrent', + {'name': self.name}) if not self._set_torrent_label(result): - logger.log('{name}: Unable to set the label for Torrent'.format(name=self.name), logger.ERROR) + log.error('{name}: Unable to set the label for Torrent', + {'name': self.name}) if not self._set_torrent_ratio(result): - logger.log('{name}: Unable to set the ratio for Torrent'.format(name=self.name), logger.ERROR) + log.error('{name}: Unable to set the ratio for Torrent', + {'name': self.name}) if not self._set_torrent_seed_time(result): - logger.log('{name}: Unable to set the seed time for Torrent'.format(name=self.name), logger.ERROR) + log.error('{name}: Unable to set the seed time for Torrent', + {'name': self.name}) if not self._set_torrent_path(result): - logger.log('{name}: Unable to set the path for Torrent'.format(name=self.name), logger.ERROR) + log.error('{name}: Unable to set the path for Torrent', + {'name': self.name}) if result.priority != 0 and not self._set_torrent_priority(result): - logger.log('{name}: Unable to set priority for Torrent'.format(name=self.name), logger.ERROR) + log.error('{name}: Unable to set priority for Torrent', + {'name': self.name}) except Exception as msg: - logger.log('{name}: Failed Sending Torrent'.format(name=self.name), logger.ERROR) - logger.log('{name}: Exception raised when sending torrent: {result}. Error: {error}'.format - (name=self.name, result=result, error=msg), logger.DEBUG) + log.error('{name}: Failed Sending Torrent', + {'name': self.name}) + log.debug('{name}: Exception raised when sending torrent {result}.' + ' Error: {error}', + {'name': self.name, 'result': result, 'error': msg}) return r_code return r_code diff --git a/medusa/clients/mlnet_client.py b/medusa/clients/torrent/mlnet_client.py similarity index 66% rename from medusa/clients/mlnet_client.py rename to medusa/clients/torrent/mlnet_client.py index ce2f4e6b10..f3c6dee5ca 100644 --- a/medusa/clients/mlnet_client.py +++ b/medusa/clients/torrent/mlnet_client.py @@ -1,26 +1,10 @@ # coding=utf-8 -# Author: kounch -# -# This file is part of Medusa. 
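The generic.py changes above hinge on the torrent's info hash. For context, a standalone sketch of the two usual ways it is obtained: the SHA-1 over the bencoded info dict matches _get_info_hash above, while the magnet-URI handling (regular expression and base32 fallback) is an assumption spelled out here only for illustration.

    import re
    from base64 import b16encode, b32decode
    from hashlib import sha1

    from bencode import bdecode, bencode

    def info_hash_from_magnet(magnet_uri):
        """Extract the btih hash from a magnet link (illustrative regex)."""
        match = re.search(r'urn:btih:(\w{32,40})', magnet_uri)
        if not match:
            raise ValueError('No info hash found in magnet URI')
        torrent_hash = match.group(1)
        if len(torrent_hash) == 32:  # base32-encoded hash; convert to hex
            torrent_hash = b16encode(b32decode(torrent_hash.upper())).decode('ascii')
        return torrent_hash.lower()

    def info_hash_from_torrent(torrent_bytes):
        """SHA-1 of the bencoded ``info`` dict, as in _get_info_hash above."""
        metainfo = bdecode(torrent_bytes)
        return sha1(bencode(metainfo['info'])).hexdigest()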
-# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . """MLDonkey Client.""" from __future__ import unicode_literals -from .generic import GenericClient +from medusa.clients.torrent.generic import GenericClient class MLNetAPI(GenericClient): diff --git a/medusa/clients/qbittorrent_client.py b/medusa/clients/torrent/qbittorrent_client.py similarity index 84% rename from medusa/clients/qbittorrent_client.py rename to medusa/clients/torrent/qbittorrent_client.py index 3de46d0d87..e268a85b2d 100644 --- a/medusa/clients/qbittorrent_client.py +++ b/medusa/clients/torrent/qbittorrent_client.py @@ -1,28 +1,13 @@ # coding=utf-8 -# Author: Mr_Orange -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . """qBittorrent Client.""" from __future__ import unicode_literals +from medusa import app +from medusa.clients.torrent.generic import GenericClient + from requests.auth import HTTPDigestAuth -from .generic import GenericClient -from .. import app class QBittorrentAPI(GenericClient): diff --git a/medusa/clients/rtorrent_client.py b/medusa/clients/torrent/rtorrent_client.py similarity index 73% rename from medusa/clients/rtorrent_client.py rename to medusa/clients/torrent/rtorrent_client.py index 0fe8f41092..e159786270 100644 --- a/medusa/clients/rtorrent_client.py +++ b/medusa/clients/torrent/rtorrent_client.py @@ -1,40 +1,20 @@ # coding=utf-8 -# Author: jkaberg -# - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . 
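For orientation, a condensed, illustrative outline of the send_torrent() flow whose log messages were reworked in generic.py above; the magnet-versus-file test is an assumption, and the best-effort setters correspond to the errors logged there.

    def send_torrent_outline(client, result):
        """Illustrative outline only; ``client`` is any GenericClient subclass."""
        if not client._get_auth():
            return False

        # Assumption: magnet links go through _add_torrent_uri, everything else
        # is uploaded as a .torrent file via _add_torrent_file.
        if result.url.startswith('magnet'):
            added = client._add_torrent_uri(result)
        else:
            added = client._add_torrent_file(result)
        if not added:
            return False

        # The post-add tweaks are best effort: failures are logged but do not
        # undo the snatch, matching the error messages above.
        for setter in (client._set_torrent_pause, client._set_torrent_label,
                       client._set_torrent_ratio, client._set_torrent_seed_time,
                       client._set_torrent_path):
            setter(result)
        if result.priority != 0:
            client._set_torrent_priority(result)
        return added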
- - -# pylint: disable=line-too-long - -# based on fuzemans work -# https://github.com/RuudBurger/CouchPotatoServer/blob/develop/couchpotato/core/downloaders/rtorrent/main.py + """rTorrent Client.""" from __future__ import unicode_literals import logging +from medusa import app +from medusa.clients.torrent.generic import GenericClient +from medusa.logger.adapters.style import BraceAdapter + from rtorrent import RTorrent -from .generic import GenericClient -from .. import app -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class RTorrentAPI(GenericClient): @@ -97,8 +77,9 @@ def _add_torrent_uri(self, result): # Start torrent torrent.start() - except Exception as error: - logger.warning('Error while sending torrent: {error!r}', error=error) + except Exception as msg: + log.warning('Error while sending torrent: {error!r}', + {'error': msg}) return False else: return True @@ -129,7 +110,8 @@ def _add_torrent_file(self, result): # Start torrent torrent.start() except Exception as msg: - logger.warning('Error while sending torrent: {error!r}', error=msg) + log.warning('Error while sending torrent: {error!r}', + {'error': msg}) return False else: return True diff --git a/medusa/clients/transmission_client.py b/medusa/clients/torrent/transmission_client.py similarity index 61% rename from medusa/clients/transmission_client.py rename to medusa/clients/torrent/transmission_client.py index 00736e4ff5..41ffcf744e 100644 --- a/medusa/clients/transmission_client.py +++ b/medusa/clients/torrent/transmission_client.py @@ -1,21 +1,5 @@ # coding=utf-8 -# Author: Mr_Orange -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . """Transmission Client.""" from __future__ import unicode_literals @@ -26,14 +10,20 @@ import re from base64 import b64encode -from requests.compat import urljoin +from medusa import app +from medusa.clients.torrent.generic import GenericClient +from medusa.helpers import ( + is_already_processed_media, + is_info_hash_in_history, + is_info_hash_processed, +) +from medusa.logger.adapters.style import BraceAdapter -from .generic import GenericClient -from .. import app -from ..helpers import is_already_processed_media, is_info_hash_in_history, is_info_hash_processed +from requests.compat import urljoin -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class TransmissionAPI(GenericClient): @@ -214,6 +204,32 @@ def remove_torrent(self, info_hash): return self.response.json()['result'] == 'success' + def move_torrent(self, info_hash): + """Set new torrent location given info_hash. 
+ + :param info_hash: + :type info_hash: string + :return + :rtype: bool + """ + if not app.TORRENT_SEED_LOCATION or not info_hash: + return + + arguments = { + 'ids': [info_hash], + 'location': app.TORRENT_SEED_LOCATION, + 'move': 'true' + } + + post_data = json.dumps({ + 'arguments': arguments, + 'method': 'torrent-set-location', + }) + + self._request(method='post', data=post_data) + + return self.response.json()['result'] == 'success' + def remove_ratio_reached(self): """Remove all Medusa torrents that ratio was reached. @@ -229,28 +245,33 @@ def remove_ratio_reached(self): 4 = Downloading 5 = Queued to seed 6 = Seeding + + isFinished = whether seeding finished (based on idle timeout or seed ratio) + IsStalled = Based on Tranmission setting "Transfer is stalled when inactive for" """ - logger.info('Checking Transmission torrent status.') + log.info('Checking Transmission torrent status.') return_params = { - 'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', - 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files'] + 'fields': ['name', 'hashString', 'percentDone', 'status', + 'isStalled', 'errorString', 'seedRatioLimit', + 'isFinished', 'uploadRatio', 'seedIdleLimit', 'files'] } post_data = json.dumps({'arguments': return_params, 'method': 'torrent-get'}) if not self._request(method='post', data=post_data): - logger.debug('Could not connect to Transmission. Check logs') + log.debug('Could not connect to Transmission. Check logs') return try: returned_data = json.loads(self.response.content) except ValueError: - logger.warning('Unexpected data received from Transmission: {resp}', resp=self.response.content) + log.warning('Unexpected data received from Transmission: {resp}', + {'resp': self.response.content}) return if not returned_data['result'] == 'success': - logger.debug('Nothing in queue or error') + log.debug('Nothing in queue or error') return found_torrents = False @@ -263,37 +284,81 @@ def remove_ratio_reached(self): to_remove = False for i in torrent['files']: - # Check if media was processed OR check hash in case of RARed torrents + # Check if media was processed + # OR check hash in case of RARed torrents if is_already_processed_media(i['name']) or is_info_hash_processed(str(torrent['hashString'])): to_remove = True # Don't need to check status if we are not going to remove it. if not to_remove: - logger.info("Torrent wasn't post-processed yet. Skipping: {torrent_name}", - torrent_name=torrent['name']) + log.info('Torrent not yet post-processed. Skipping: {torrent}', + {'torrent': torrent['name']}) continue status = 'busy' + error_string = torrent.get('errorString') if torrent.get('isStalled') and not torrent['percentDone'] == 1: - status = 'failed' + status = 'stalled' + elif error_string and 'unregistered torrent' in error_string.lower(): + status = 'unregistered' elif torrent['status'] == 0: if torrent['percentDone'] == 1 and torrent.get('isFinished'): status = 'completed' else: status = 'stopped' + elif torrent['status'] == 6: + status = 'seeding' if status == 'completed': - logger.info("Torrent completed and reached minimum ratio: [{ratio:.3f}] or " - "seed idle limit: [{seed_limit} min]. Removing it: [{name}]", - ratio=torrent['uploadRatio'], seed_limit=torrent['seedIdleLimit'], name=torrent['name']) - # self.remove_torrent(torrent['hashString']) + log.info( + 'Torrent completed and reached minimum' + ' ratio: [{ratio:.3f}/{ratio_limit:.3f}] or' + ' seed idle limit: [{seed_limit} min].' 
+ ' Removing it: [{name}]', + ratio=torrent['uploadRatio'], + ratio_limit=torrent['seedRatioLimit'], + seed_limit=torrent['seedIdleLimit'], + name=torrent['name'] + ) + self.remove_torrent(torrent['hashString']) + elif status == 'stalled': + log.warning('Torrent is stalled. Check it: [{name}]', + name=torrent['name']) + elif status == 'unregistered': + log.warning('Torrent was unregistered from tracker.' + ' Check it: [{name}]', name=torrent['name']) + elif status == 'seeding': + if float(torrent['uploadRatio']) < float(torrent['seedRatioLimit']): + log.info( + 'Torrent did not reach minimum' + ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].' + ' Keeping it: [{name}]', + ratio=torrent['uploadRatio'], + ratio_limit=torrent['seedRatioLimit'], + name=torrent['name'] + ) + else: + log.info( + 'Torrent completed and reached minimum ratio but it' + ' was force started again. Current' + ' ratio: [{ratio:.3f}/{ratio_limit:.3f}].' + ' Keeping it: [{name}]', + ratio=torrent['uploadRatio'], + ratio_limit=torrent['seedRatioLimit'], + name=torrent['name'] + ) + elif status in ('stopped', 'busy'): + log.info('Torrent is {status}. Keeping it: [{name}]', + status=status, name=torrent['name']) else: - logger.info("Torrent didn't reached minimum ratio: [{ratio}]. " - "Keeping it: [{name}]", - ratio=torrent['uploadRatio'], name=torrent['name']) - + log.warning( + 'Torrent has an unmapped status. Keeping it: [{name}].' + ' Report torrent info: {info}', + name=torrent['name'], + info=torrent + ) if not found_torrents: - logger.info('No torrents found that were snatched by Medusa') + log.info('No torrents found that were snatched by Medusa') api = TransmissionAPI diff --git a/medusa/clients/utorrent_client.py b/medusa/clients/torrent/utorrent_client.py similarity index 82% rename from medusa/clients/utorrent_client.py rename to medusa/clients/torrent/utorrent_client.py index 8692b4edf0..ca37e59f3b 100644 --- a/medusa/clients/utorrent_client.py +++ b/medusa/clients/torrent/utorrent_client.py @@ -1,21 +1,5 @@ # coding=utf-8 -# Authors: Mr_Orange , EchelonFour -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . """uTorrent Client.""" from __future__ import unicode_literals @@ -23,13 +7,16 @@ import logging import re +from medusa import app +from medusa.clients.torrent.generic import GenericClient +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urljoin from six import iteritems -from .generic import GenericClient -from .. 
import app -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class UTorrentAPI(GenericClient): @@ -51,8 +38,8 @@ def __init__(self, host=None, username=None, password=None): def _request(self, method='get', params=None, data=None, files=None, cookies=None): if cookies: - logger.debug('{name}: Received unused argument {arg}: {value}', name=self.name, arg='cookies', - value=cookies) + log.debug('{name}: Received unused argument: cookies={value!r}', + {'name': self.name, 'value': cookies}) # Workaround for uTorrent 2.2.1 # Need an OrderedDict but only supported in 2.7+ diff --git a/medusa/config.py b/medusa/config.py index b22c714a50..f445a5e397 100644 --- a/medusa/config.py +++ b/medusa/config.py @@ -24,8 +24,9 @@ from requests.compat import urlsplit from six import iteritems from six.moves.urllib.parse import urlunsplit, uses_netloc -from . import app, common, db, helpers, logger, naming +from . import app, common, db, helpers, logger, naming, scheduler from .helper.common import try_int +from .version_checker import CheckVersion # Address poor support for scgi over unix domain sockets # this is not nicely handled by python currently @@ -235,6 +236,26 @@ def change_BACKLOG_FREQUENCY(freq): app.backlog_search_scheduler.cycleTime = datetime.timedelta(minutes=app.BACKLOG_FREQUENCY) +def change_PROPERS_FREQUENCY(check_propers_interval): + """ + Change frequency of backlog thread + + :param freq: New frequency + """ + if not app.DOWNLOAD_PROPERS: + return + + if app.CHECK_PROPERS_INTERVAL == check_propers_interval: + return + + if check_propers_interval in app.PROPERS_SEARCH_INTERVAL: + update_interval = datetime.timedelta(minutes=app.PROPERS_SEARCH_INTERVAL[check_propers_interval]) + else: + update_interval = datetime.timedelta(hours=1) + app.CHECK_PROPERS_INTERVAL = check_propers_interval + app.proper_finder_scheduler.cycleTime = update_interval + + def change_UPDATE_FREQUENCY(freq): """ Change frequency of daily updater thread @@ -283,6 +304,7 @@ def change_VERSION_NOTIFY(version_notify): :param version_notify: New frequency """ + oldSetting = app.VERSION_NOTIFY app.VERSION_NOTIFY = version_notify @@ -294,6 +316,19 @@ def change_VERSION_NOTIFY(version_notify): app.version_check_scheduler.forceRun() +def change_GIT_PATH(): + """ + Recreate the version_check scheduler when GIT_PATH is changed. + Force a run to clear or set any error messages. + """ + app.version_check_scheduler = None + app.version_check_scheduler = scheduler.Scheduler( + CheckVersion(), cycleTime=datetime.timedelta(hours=app.UPDATE_FREQUENCY), threadName="CHECKVERSION", silent=False) + app.version_check_scheduler.enable = True + app.version_check_scheduler.start() + app.version_check_scheduler.forceRun() + + def change_DOWNLOAD_PROPERS(download_propers): """ Enable/Disable proper download thread diff --git a/medusa/databases/__init__.py b/medusa/databases/__init__.py index c01bca885d..058045cf6c 100644 --- a/medusa/databases/__init__.py +++ b/medusa/databases/__init__.py @@ -1,20 +1,3 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
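change_PROPERS_FREQUENCY above resolves a named interval to the proper-finder scheduler's cycle time, falling back to one hour for unknown keys. A tiny standalone version of just that mapping step (the interval table shown here is a placeholder for app.PROPERS_SEARCH_INTERVAL):

    import datetime

    # Placeholder values; the real table lives in app.PROPERS_SEARCH_INTERVAL.
    PROPERS_SEARCH_INTERVAL = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}

    def propers_cycle_time(check_propers_interval):
        """Return the timedelta the proper-finder scheduler should use."""
        if check_propers_interval in PROPERS_SEARCH_INTERVAL:
            return datetime.timedelta(minutes=PROPERS_SEARCH_INTERVAL[check_propers_interval])
        # Unknown keys fall back to an hourly cycle, as in the code above.
        return datetime.timedelta(hours=1)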
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - __all__ = ["main_db", "cache_db", "failed_db"] diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py index 60e746d8bd..2d123d7aa5 100644 --- a/medusa/databases/cache_db.py +++ b/medusa/databases/cache_db.py @@ -1,26 +1,10 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - -from .. import db - - -# Add new migrations at the bottom of the list; subclass the previous migration. +from medusa import db + + +# Add new migrations at the bottom of the list +# and subclass the previous migration. class InitialSchema(db.SchemaUpgrade): def test(self): return self.hasTable("db_version") diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index 05d95444b4..cf62429529 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -1,27 +1,11 @@ # coding=utf-8 -# Author: Tyler Fenby -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +from medusa import db +from medusa.common import Quality -from .. import db -from ..common import Quality - -# Add new migrations at the bottom of the list; subclass the previous migration. +# Add new migrations at the bottom of the list +# and subclass the previous migration. class InitialSchema(db.SchemaUpgrade): def test(self): return self.hasTable('db_version') diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index d4583b28f1..c318afc400 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -1,36 +1,26 @@ # coding=utf-8 -# -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . import datetime +import logging import os.path +import sys import warnings +from medusa import common, db, helpers, subtitles +from medusa.helper.common import dateTimeFormat, episode_num +from medusa.logger.adapters.style import BraceAdapter +from medusa.name_parser.parser import NameParser + from six import iteritems -from .. import common, db, helpers, logger, subtitles -from ..helper.common import dateTimeFormat, episode_num -from ..name_parser.parser import NameParser + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) MIN_DB_VERSION = 40 # oldest db version we support migrating from MAX_DB_VERSION = 44 # Used to check when checking for updates -CURRENT_MINOR_DB_VERSION = 5 +CURRENT_MINOR_DB_VERSION = 8 class MainSanityCheck(db.DBSanityCheck): @@ -50,17 +40,18 @@ def check(self): self.clean_null_indexer_mappings() def clean_null_indexer_mappings(self): - logger.log(u'Checking for null indexer mappings', logger.DEBUG) + log.debug(u'Checking for null indexer mappings') query = "SELECT * from indexer_mapping where mindexer_id = ''" sql_results = self.connection.select(query) if sql_results: - logger.log(u"Found {0} null indexer mapping. Deleting...".format(len(sql_results)), logger.DEBUG) + log.debug(u'Found {0} null indexer mapping. Deleting...', + len(sql_results)) self.connection.action("DELETE FROM indexer_mapping WHERE mindexer_id = ''") def update_old_propers(self): # This is called once when we create proper_tags columns - logger.log(u'Checking for old propers without proper tags', logger.DEBUG) + log.debug(u'Checking for old propers without proper tags') query = "SELECT resource FROM history WHERE (proper_tags is null or proper_tags is '') " + \ "AND (action LIKE '%2' OR action LIKE '%9') AND " + \ "(resource LIKE '%REPACK%' or resource LIKE '%PROPER%' or resource LIKE '%REAL%')" @@ -68,36 +59,41 @@ def update_old_propers(self): if sql_results: for sql_result in sql_results: proper_release = sql_result['resource'] - logger.log(u"Found old propers without proper tags: {0}".format(proper_release), logger.DEBUG) + log.debug(u'Found old propers without proper tags: {0}', + proper_release) parse_result = NameParser()._parse_string(proper_release) if parse_result.proper_tags: proper_tags = '|'.join(parse_result.proper_tags) - logger.log(u"Add proper tags '{0}' to '{1}'".format(proper_tags, proper_release), logger.DEBUG) + log.debug(u'Add proper tags {0!r} to {1!r}', + proper_tags, proper_release) self.connection.action("UPDATE history SET proper_tags = ? WHERE resource = ?", [proper_tags, proper_release]) def fix_subtitle_reference(self): - logger.log(u'Checking for delete episodes with subtitle reference', logger.DEBUG) + log.debug(u'Checking for delete episodes with subtitle reference') query = "SELECT episode_id, showid, location, subtitles, subtitles_searchcount, subtitles_lastsearch " + \ "FROM tv_episodes WHERE location = '' AND subtitles is not ''" sql_results = self.connection.select(query) if sql_results: for sql_result in sql_results: - logger.log(u"Found deleted episode id {0} from show ID {1} with subtitle data. Erasing reference...".format - (sql_result['episode_id'], sql_result['showid']), logger.WARNING) + log.warning(u'Found deleted episode id {0} from show ID {1}' + u' with subtitle data. 
Erasing reference...', + sql_result['episode_id'], sql_result['showid']) self.connection.action("UPDATE tv_episodes SET subtitles = '', subtitles_searchcount = 0, subtitles_lastsearch = '' " + \ "WHERE episode_id = %i" % (sql_result['episode_id'])) def convert_archived_to_compound(self): - logger.log(u'Checking for archived episodes not qualified', logger.DEBUG) + log.debug(u'Checking for archived episodes not qualified') query = "SELECT episode_id, showid, e.status, e.location, season, episode, anime " + \ "FROM tv_episodes e, tv_shows s WHERE e.status = %s AND e.showid = s.indexer_id" % common.ARCHIVED sql_results = self.connection.select(query) if sql_results: - logger.log(u"Found %i shows with bare archived status, attempting automatic conversion..." % len(sql_results), logger.WARNING) + log.warning(u'Found {0} shows with bare archived status, ' + u'attempting automatic conversion...', + len(sql_results)) for archivedEp in sql_results: fixedStatus = common.Quality.composite_status(common.ARCHIVED, common.Quality.UNKNOWN) @@ -106,24 +102,28 @@ def convert_archived_to_compound(self): quality = common.Quality.name_quality(archivedEp['location'], archivedEp['anime'], extend=False) fixedStatus = common.Quality.composite_status(common.ARCHIVED, quality) - logger.log(u'Changing status from {old_status} to {new_status} for {id}: {ep} at {location} (File {result})'.format - (old_status=common.statusStrings[common.ARCHIVED], new_status=common.statusStrings[fixedStatus], - id=archivedEp['showid'], - ep=episode_num(archivedEp['season'], archivedEp['episode']), - location=archivedEp['location'] if archivedEp['location'] else 'unknown location', - result=('NOT FOUND', 'EXISTS')[bool(existing)])) + log.info( + u'Changing status from {old_status} to {new_status} for' + u' {id}: {ep} at {location} (File {result})', + {'old_status': common.statusStrings[common.ARCHIVED], + 'new_status': common.statusStrings[fixedStatus], + 'id': archivedEp['showid'], + 'ep': episode_num(archivedEp['season'], + archivedEp['episode']), + 'location': archivedEp['location'] or 'unknown location', + 'result': 'EXISTS' if existing else 'NOT FOUND', } + ) self.connection.action("UPDATE tv_episodes SET status = %i WHERE episode_id = %i" % (fixedStatus, archivedEp['episode_id'])) def fix_duplicate_shows(self, column='indexer_id'): - sql_results = self.connection.select( "SELECT show_id, " + column + ", COUNT(" + column + ") as count FROM tv_shows GROUP BY " + column + " HAVING count > 1") for cur_duplicate in sql_results: - logger.log(u"Duplicate show detected! " + column + ": " + str(cur_duplicate[column]) + u" count: " + str( - cur_duplicate["count"]), logger.DEBUG) + log.info(u'Duplicate show detected! {0}: {1!s} count: {2!s}', + column, cur_duplicate[column], cur_duplicate["count"]) cur_dupe_results = self.connection.select( "SELECT show_id, " + column + " FROM tv_shows WHERE " + column + " = ? 
LIMIT ?", @@ -131,9 +131,9 @@ def fix_duplicate_shows(self, column='indexer_id'): ) for cur_dupe_id in cur_dupe_results: - logger.log( - u"Deleting duplicate show with " + column + ": " + str(cur_dupe_id[column]) + u" show_id: " + str( - cur_dupe_id["show_id"])) + log.info(u'Deleting duplicate show with {0}: {1!s}' + u' show_id: {2!s}', column, cur_dupe_id[column], + cur_dupe_id["show_id"]) self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]]) def fix_duplicate_episodes(self): @@ -143,10 +143,10 @@ def fix_duplicate_episodes(self): for cur_duplicate in sql_results: - logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: " + str( - cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str( - cur_duplicate["count"]), logger.DEBUG) - + log.debug(u'Duplicate episode detected! showid: {0!s}' + u' season: {1!s} episode: {2!s} count: {3!s}', + cur_duplicate["showid"], cur_duplicate["season"], + cur_duplicate["episode"], cur_duplicate["count"]) cur_dupe_results = self.connection.select( "SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?", [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], @@ -154,7 +154,8 @@ def fix_duplicate_episodes(self): ) for cur_dupe_id in cur_dupe_results: - logger.log(u"Deleting duplicate episode with episode_id: " + str(cur_dupe_id["episode_id"])) + log.info(u'Deleting duplicate episode with episode_id: {0!s}', + cur_dupe_id["episode_id"]) self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id["episode_id"]]) def fix_orphan_episodes(self): @@ -163,34 +164,42 @@ def fix_orphan_episodes(self): "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL") for cur_orphan in sql_results: - logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str( - cur_orphan["showid"]), logger.DEBUG) - logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"])) + log.debug(u'Orphan episode detected! 
episode_id: {0!s}' + u' showid: {1!s}', cur_orphan['episode_id'], + cur_orphan['showid']) + log.info(u'Deleting orphan episode with episode_id: {0!s}', + cur_orphan['episode_id']) self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]]) def fix_missing_table_indexes(self): if not self.connection.select("PRAGMA index_info('idx_indexer_id')"): - logger.log(u"Missing idx_indexer_id for TV Shows table detected!, fixing...") + log.info(u'Missing idx_indexer_id for TV Shows table detected!,' + u' fixing...') self.connection.action("CREATE UNIQUE INDEX idx_indexer_id ON tv_shows(indexer_id);") if not self.connection.select("PRAGMA index_info('idx_tv_episodes_showid_airdate')"): - logger.log(u"Missing idx_tv_episodes_showid_airdate for TV Episodes table detected!, fixing...") + log.info(u'Missing idx_tv_episodes_showid_airdate for TV Episodes' + u' table detected!, fixing...') self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);") if not self.connection.select("PRAGMA index_info('idx_showid')"): - logger.log(u"Missing idx_showid for TV Episodes table detected!, fixing...") + log.info(u'Missing idx_showid for TV Episodes table detected!,' + u' fixing...') self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);") if not self.connection.select("PRAGMA index_info('idx_status')"): - logger.log(u"Missing idx_status for TV Episodes table detected!, fixing...") + log.info(u'Missing idx_status for TV Episodes table detected!,' + u' fixing...') self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, season, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"): - logger.log(u"Missing idx_sta_epi_air for TV Episodes table detected!, fixing...") + log.info(u'Missing idx_sta_epi_air for TV Episodes table' + u' detected!, fixing...') self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"): - logger.log(u"Missing idx_sta_epi_sta_air for TV Episodes table detected!, fixing...") + log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table' + u' detected!, fixing...') self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)") def fix_unaired_episodes(self): @@ -202,7 +211,8 @@ def fix_unaired_episodes(self): [curDate.toordinal(), common.SKIPPED, common.WANTED]) for cur_unaired in sql_results: - logger.log(u"Fixing unaired episode status for episode_id: %s" % cur_unaired["episode_id"]) + log.info(u'Fixing unaired episode status for episode_id: {0!s}', + cur_unaired["episode_id"]) self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [common.UNAIRED, cur_unaired["episode_id"]]) @@ -220,6 +230,12 @@ def fix_indexer_show_statues(self): 'pilot rejected': 'Ended', 'canceled': 'Ended', 'ended': 'Ended', + 'to be determined': 'Continuing', + 'running': 'Continuing', + 'planned': 'Continuing', + 'in production': 'Continuing', + 'pilot': 'Continuing', + 'cancelled': 'Ended', '': 'Unknown', } @@ -230,9 +246,11 @@ def fix_episode_statuses(self): sql_results = self.connection.select("SELECT episode_id, showid FROM tv_episodes WHERE status IS NULL") for cur_ep in sql_results: - logger.log(u"MALFORMED episode status detected! 
episode_id: " + str(cur_ep["episode_id"]) + " showid: " + str( - cur_ep["showid"]), logger.DEBUG) - logger.log(u"Fixing malformed episode status with episode_id: " + str(cur_ep["episode_id"])) + log.debug(u'MALFORMED episode status detected! episode_id: {0!s}' + u' showid: {1!s}', cur_ep['episode_id'], + cur_ep['showid']) + log.info(u'Fixing malformed episode status with' + u' episode_id: {0!s}', cur_ep['episode_id']) self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [common.UNKNOWN, cur_ep["episode_id"]]) @@ -243,9 +261,11 @@ def fix_invalid_airdates(self): [datetime.date.max.toordinal()]) for bad_airdate in sql_results: - logger.log(u"Bad episode airdate detected! episode_id: " + str(bad_airdate["episode_id"]) + " showid: " + str( - bad_airdate["showid"]), logger.DEBUG) - logger.log(u"Fixing bad episode airdate for episode_id: " + str(bad_airdate["episode_id"])) + log.debug(u'Bad episode airdate detected! episode_id: {0!s}' + u' showid: {1!s}', bad_airdate['episode_id'], + bad_airdate['showid']) + log.info(u'Fixing bad episode airdate for episode_id: {0!s}', + bad_airdate['episode_id']) self.connection.action("UPDATE tv_episodes SET airdate = '1' WHERE episode_id = ?", [bad_airdate["episode_id"]]) def fix_subtitles_codes(self): @@ -261,13 +281,15 @@ def fix_subtitles_codes(self): for sql_result in sql_results: langs = [] - logger.log(u"Checking subtitle codes for episode_id: %s, codes: %s" % - (sql_result['episode_id'], sql_result['subtitles']), logger.DEBUG) + log.debug(u'Checking subtitle codes for episode_id: {0!s},' + u' codes: {1!s}', sql_result['episode_id'], + sql_result['subtitles']) for subcode in sql_result['subtitles'].split(','): if not len(subcode) == 3 or subcode not in subtitles.subtitle_code_filter(): - logger.log(u"Fixing subtitle codes for episode_id: %s, invalid code: %s" % - (sql_result['episode_id'], subcode), logger.DEBUG) + log.debug(u'Fixing subtitle codes for episode_id: {0!s},' + u' invalid code: {1!s}', + sql_result['episode_id'], subcode) continue langs.append(subcode) @@ -280,11 +302,12 @@ def fix_show_nfo_lang(self): def backupDatabase(version): - logger.log(u"Backing up database before upgrade") + log.info(u'Backing up database before upgrade') if not helpers.backup_versioned_file(db.dbFilename(), version): - logger.log_error_and_exit(u"Database backup failed, abort upgrading database") + log.error(u'Database backup failed, abort upgrading database') + sys.exit(1) else: - logger.log(u"Proceeding with upgrade") + log.info(u'Proceeding with upgrade') # ====================== @@ -305,11 +328,11 @@ def execute(self): "CREATE TABLE info(last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC);", "CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC, PRIMARY KEY(indexer_id, season, episode));", "CREATE TABLE tv_shows(show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC DEFAULT -1);", - "CREATE TABLE 
tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer TEXT, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT);", + "CREATE TABLE tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT);", "CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT);", "CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT);", "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER);", - "CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer));", + "CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER, PRIMARY KEY (indexer_id, indexer, mindexer));", "CREATE UNIQUE INDEX idx_indexer_id ON tv_shows(indexer_id);", "CREATE INDEX idx_showid ON tv_episodes(showid);", "CREATE INDEX idx_sta_epi_air ON tv_episodes(status, episode, airdate);", @@ -325,18 +348,25 @@ def execute(self): cur_db_version = self.checkDBVersion() if cur_db_version < MIN_DB_VERSION: - logger.log_error_and_exit(u"Your database version (" + - str(cur_db_version) + ") is too old to migrate from what this version of the application supports (" + - str(MIN_DB_VERSION) + ").\n" + - "Upgrade using a previous version (tag) build 496 to build 501 of the application first or remove database file to begin fresh." - ) + log.error( + u'Your database version ({0!s}) is too old to migrate' + u' from what this version of the application' + u' supports ({1!s}).\n' + u'Upgrade using a previous version (tag) build 496 to' + u' build 501 of the application first or remove database' + u' file to begin fresh.', cur_db_version, MIN_DB_VERSION, + ) + sys.exit(1) if cur_db_version > MAX_DB_VERSION: - logger.log_error_and_exit(u"Your database version (" + - str(cur_db_version) + ") has been incremented past what this version of the application supports (" + - str(MAX_DB_VERSION) + ").\n" + - "If you have used other forks of the application, your database may be unusable due to their modifications." 
- ) + log.error( + u'Your database version ({0!s}) has been incremented past' + u' what this version of the application supports' + u' ({1!s}).\n' + u'If you have used other forks of the application, your' + u' database may be unusable due to their modifications.', + cur_db_version, MAX_DB_VERSION, + ) class AddVersionToTvEpisodes(InitialSchema): @@ -346,7 +376,7 @@ def test(self): def execute(self): backupDatabase(self.checkDBVersion()) - logger.log(u"Adding column version to tv_episodes and history") + log.info(u'Adding column version to tv_episodes and history') self.addColumn("tv_episodes", "version", "NUMERIC", "-1") self.addColumn("tv_episodes", "release_group", "TEXT", "") self.addColumn("history", "version", "NUMERIC", "-1") @@ -361,7 +391,7 @@ def test(self): def execute(self): backupDatabase(self.checkDBVersion()) - logger.log(u"Adding column default_ep_status to tv_shows") + log.info(u'Adding column default_ep_status to tv_shows') self.addColumn("tv_shows", "default_ep_status", "NUMERIC", "-1") self.incDBVersion() @@ -374,7 +404,7 @@ def test(self): def execute(self): backupDatabase(self.checkDBVersion()) - logger.log(u"Converting column indexer and default_ep_status field types to numeric") + log.info(u'Converting column indexer and default_ep_status field types to numeric') self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows") self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows") self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC)") @@ -407,12 +437,12 @@ def inc_minor_version(self): def execute(self): backupDatabase(self.checkDBVersion()) - logger.log(u"Add minor version numbers to database") + log.info(u'Add minor version numbers to database') self.addColumn(b'db_version', b'db_minor_version') self.inc_minor_version() - logger.log(u'Updated to: %d.%d' % self.connection.version) + log.info(u'Updated to: {}.{}', *self.connection.version) class TestIncreaseMajorVersion(AddMinorVersion): @@ -434,11 +464,11 @@ def execute(self): """ backupDatabase(self.connection.version) - logger.log(u"Test major and minor version updates database") + log.info(u'Test major and minor version updates database') self.inc_major_version() self.inc_minor_version() - logger.log(u'Updated to: %d.%d' % self.connection.version) + log.info(u'Updated to: {}.{}', *self.connection.version) class AddProperTags(TestIncreaseMajorVersion): @@ -457,14 +487,14 @@ def execute(self): backupDatabase(self.connection.version) if not self.hasColumn('history', 'proper_tags'): - logger.log(u'Adding column proper_tags to history') + log.info(u'Adding column proper_tags to history') self.addColumn('history', 'proper_tags', 'TEXT', u'') # Call the update old propers once MainSanityCheck(self.connection).update_old_propers() self.inc_minor_version() - logger.log(u'Updated to: %d.%d' % self.connection.version) + log.info(u'Updated to: {}.{}', *self.connection.version) class AddManualSearched(AddProperTags): @@ -483,17 +513,17 @@ def execute(self): 
backupDatabase(self.connection.version) if not self.hasColumn('history', 'manually_searched'): - logger.log(u'Adding column manually_searched to history') + log.info(u'Adding column manually_searched to history') self.addColumn('history', 'manually_searched', 'NUMERIC', 0) if not self.hasColumn('tv_episodes', 'manually_searched'): - logger.log(u'Adding column manually_searched to tv_episodes') + log.info(u'Adding column manually_searched to tv_episodes') self.addColumn('tv_episodes', 'manually_searched', 'NUMERIC', 0) MainSanityCheck(self.connection).update_old_propers() self.inc_minor_version() - logger.log(u'Updated to: %d.%d' % self.connection.version) + log.info(u'Updated to: {}.{}', *self.connection.version) class AddInfoHash(AddManualSearched): @@ -508,7 +538,7 @@ def test(self): def execute(self): backupDatabase(self.connection.version) - logger.log(u"Adding column info_hash in history") + log.info(u'Adding column info_hash in history') if not self.hasColumn("history", "info_hash"): self.addColumn("history", "info_hash", 'TEXT', None) self.inc_minor_version() @@ -526,11 +556,79 @@ def test(self): def execute(self): backupDatabase(self.connection.version) - logger.log(u"Adding column plot in imdb_info") - if not self.hasColumn("imdb_info", "plot"): - self.addColumn("imdb_info", "plot", 'TEXT', None) + log.info(u'Adding column plot in imdb_info') + if not self.hasColumn('imdb_info', 'plot'): + self.addColumn('imdb_info', 'plot', 'TEXT', None) + + log.info(u'Adding column plot in tv_show') + if not self.hasColumn('tv_shows', 'plot'): + self.addColumn('tv_shows', 'plot', 'TEXT', None) + self.inc_minor_version() + + +class AddResourceSize(AddPlot): + """Adds column size to history table.""" + + def test(self): + """ + Test if the version is at least 44.6 + """ + return self.connection.version >= (44, 6) + + def execute(self): + backupDatabase(self.connection.version) + + log.info(u"Adding column size in history") + if not self.hasColumn("history", "size"): + self.addColumn("history", "size", 'NUMERIC', -1) + + self.inc_minor_version() + + +class AddPKIndexerMapping(AddResourceSize): + """Add PK to mindexer column in indexer_mapping table.""" + + def test(self): + """Test if the version is at least 44.7""" + return self.connection.version >= (44, 7) + + def execute(self): + backupDatabase(self.connection.version) + + log.info(u'Adding PK to mindexer column in indexer_mapping table') + self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;") + self.connection.action("CREATE TABLE IF NOT EXISTS new_indexer_mapping" + "(indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER," + "PRIMARY KEY (indexer_id, indexer, mindexer));") + self.connection.action("INSERT INTO new_indexer_mapping SELECT * FROM indexer_mapping;") + self.connection.action("DROP TABLE IF EXISTS indexer_mapping;") + self.connection.action("ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;") + self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;") + self.inc_minor_version() + + +class AddIndexerInteger(AddPKIndexerMapping): + """Make indexer as INTEGER in tv_episodes table.""" + + def test(self): + """Test if the version is at least 44.8""" + return self.connection.version >= (44, 8) + + def execute(self): + backupDatabase(self.connection.version) - logger.log(u"Adding column plot in tv_show") - if not self.hasColumn("tv_shows", "plot"): - self.addColumn("tv_shows", "plot", 'TEXT', None) + log.info(u'Make indexer and indexer_id as INTEGER in tv_episodes table') + 
self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") + self.connection.action("CREATE TABLE new_tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC," + "indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC," + "description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC," + "location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT," + "subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP," + "is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC," + "absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1," + "release_group TEXT, manually_searched NUMERIC);") + self.connection.action("INSERT INTO new_tv_episodes SELECT * FROM tv_episodes;") + self.connection.action("DROP TABLE IF EXISTS tv_episodes;") + self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") + self.connection.action("DROP TABLE IF EXISTS new_tv_episodoes;") self.inc_minor_version() diff --git a/medusa/db.py b/medusa/db.py index e1d42d5b0f..0e24ce8a72 100644 --- a/medusa/db.py +++ b/medusa/db.py @@ -100,16 +100,16 @@ def _execute(self, query, args=None, fetchall=False, fetchone=False): :return: query results """ try: + cursor = self.connection.cursor() if not args: - sql_results = self.connection.cursor().execute(query) + sql_results = cursor.execute(query) else: - sql_results = self.connection.cursor().execute(query, args) + sql_results = cursor.execute(query, args) if fetchall: return sql_results.fetchall() elif fetchone: return sql_results.fetchone() - else: - return sql_results + return sql_results except sqlite3.OperationalError as e: # This errors user should be able to fix it. if 'unable to open database file' in e.args[0] or \ diff --git a/medusa/event_queue.py b/medusa/event_queue.py index c89dc856a0..0c71c1ba6c 100644 --- a/medusa/event_queue.py +++ b/medusa/event_queue.py @@ -1,10 +1,14 @@ # coding=utf-8 + +import logging import threading import traceback +from medusa.helper.exceptions import ex + from six.moves.queue import Empty, Queue -from . import logger -from .helper.exceptions import ex + +log = logging.getLogger(__name__) class Event(object): @@ -53,9 +57,10 @@ def run(self): # exiting thread self.stop.clear() - except Exception as e: - logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR) - logger.log(repr(traceback.format_exc()), logger.DEBUG) + except Exception as error: + log.error(u"Exception generated in thread %s: %s", + self.name, ex(error)) + log.debug(repr(traceback.format_exc())) # System Events class SystemEvent(Event): diff --git a/medusa/exception_handler.py b/medusa/exception_handler.py index a8d8df4bea..e21194b29d 100644 --- a/medusa/exception_handler.py +++ b/medusa/exception_handler.py @@ -21,6 +21,6 @@ def handle(err, message='', *args, **kwargs): elif err.errno == 13: logger.warning('{m}Permission denied: {error_msg}', m=m, error_msg=err) else: - logger.warning('{m}Environment error: {error_msg}', m=m, error_msg=err) + logger.exception('{m}Environment error: {error_msg}', m=m, error_msg=err) else: logger.exception('{m}Exception generated: {error_msg}', m=m, error_msg=err) diff --git a/medusa/failed_history.py b/medusa/failed_history.py index 537dc52bf6..9d7cf14ee9 100644 --- a/medusa/failed_history.py +++ b/medusa/failed_history.py @@ -154,7 +154,7 @@ def revert_episode(ep_obj): try: logger.log(u'Reverting episode status for {show} {ep}. 
Checking if we have previous status'.format - (show=ep_obj.name, ep=episode_num(ep_obj.season, ep_obj.episode))) + (show=ep_obj.show.name, ep=episode_num(ep_obj.season, ep_obj.episode))) with ep_obj.lock: if ep_obj.episode in history_eps: ep_obj.status = history_eps[ep_obj.episode]['old_status'] @@ -271,19 +271,19 @@ def find_release(ep_obj): # Clear old snatches for this release if any exist failed_db_con = db.DBConnection('failed.db') - # failed_db_con.action( - # 'DELETE FROM history ' - # 'WHERE showid = {0}' - # ' AND season = {1}' - # ' AND episode = {2}' - # ' AND date < ( SELECT max(date)' - # ' FROM history' - # ' WHERE showid = {0}' - # ' AND season = {1}' - # ' AND episode = {2}' - # ' )'.format - # (ep_obj.show.indexerid, ep_obj.season, ep_obj.episode) - # ) + failed_db_con.action( + 'DELETE FROM history ' + 'WHERE showid = {0}' + ' AND season = {1}' + ' AND episode = {2}' + ' AND date < ( SELECT max(date)' + ' FROM history' + ' WHERE showid = {0}' + ' AND season = {1}' + ' AND episode = {2}' + ' )'.format + (ep_obj.show.indexerid, ep_obj.season, ep_obj.episode) + ) # Search for release in snatch history results = failed_db_con.select( @@ -310,11 +310,11 @@ def find_release(ep_obj): # Found a previously failed release logger.log(u'Failed release found for {show} {ep}: {release}'.format - (show=ep_obj.name, ep=episode_num(ep_obj.season, ep_obj.episode), + (show=ep_obj.show.name, ep=episode_num(ep_obj.season, ep_obj.episode), release=result['release']), logger.DEBUG) return release, provider # Release was not found logger.log(u'No releases found for {show} {ep}'.format - (show=ep_obj.name, ep=episode_num(ep_obj.season, ep_obj.episode)), logger.DEBUG) + (show=ep_obj.show.name, ep=episode_num(ep_obj.season, ep_obj.episode)), logger.DEBUG) return release, provider diff --git a/medusa/generic_queue.py b/medusa/generic_queue.py index 7affaf3a0a..8d64593bd7 100644 --- a/medusa/generic_queue.py +++ b/medusa/generic_queue.py @@ -1,26 +1,10 @@ # coding=utf-8 -# Author: Nic Wolfe - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . import datetime +import logging import threading -from . 
import logger
+log = logging.getLogger()
 class QueuePriorities(object):
@@ -31,27 +15,21 @@ class GenericQueue(object):
 def __init__(self):
- self.currentItem = None
- self.queue = []
- self.queue_name = "QUEUE"
- self.min_priority = 0
- self.lock = threading.Lock()
- self.amActive = False
 def pause(self):
 """Pauses this queue."""
- logger.log(u"Pausing queue")
+ log.info(u"Pausing queue")
 self.min_priority = 999999999999
 def unpause(self):
 """Unpauses this queue."""
- logger.log(u"Unpausing queue")
+ log.info(u"Unpausing queue")
 self.min_priority = 0
 def add_item(self, item):
@@ -77,12 +55,14 @@ def run(self, force=False):
 # only start a new task if one isn't already going
 if self.currentItem is None or not self.currentItem.isAlive():
- # if the thread is dead then the current item should be finished
+ # if the thread is dead then the current item should be
+ # finished
 if self.currentItem:
 self.currentItem.finish()
 self.currentItem = None
- # if there's something in the queue then run it in a thread and take it out of the queue
+ # if there's something in the queue then run it in a thread
+ # and take it out of the queue
 if self.queue:
 # sort by priority
@@ -106,7 +86,10 @@ def sorter(x, y):
 # launch the queue item in a thread
 self.currentItem = self.queue.pop(0)
- self.currentItem.name = self.queue_name + '-' + self.currentItem.name
+ self.currentItem.name = u'{queue}-{item}'.format(
+ queue=self.queue_name,
+ item=self.currentItem.name,
+ )
 self.currentItem.start()
 self.amActive = False
@@ -115,7 +98,6 @@ class QueueItem(threading.Thread):
 def __init__(self, name, action_id=0):
 super(QueueItem, self).__init__()
- self.name = name.replace(" ", "-").upper()
 self.inProgress = False
 self.priority = QueuePriorities.NORMAL
@@ -125,12 +107,9 @@ def __init__(self, name, action_id=0):
 def run(self):
 """Implementing classes should call this."""
- self.inProgress = True
 def finish(self):
 """Implementing Classes should call this."""
- self.inProgress = False
- threading.currentThread().name = self.name
diff --git a/medusa/github_client.py b/medusa/github_client.py
index f05c83c6bc..0e952ba9d6 100644
--- a/medusa/github_client.py
+++ b/medusa/github_client.py
@@ -33,6 +33,51 @@ def authenticate(username, password):
 raise
+def token_authenticate(token):
+ """Github authentication with a personal access token.
+
+ :param token:
+ :type token: string
+ :return:
+ :rtype: Github or None
+ """
+ try:
+ if token:
+ gh = github.MainClass.Github(login_or_token=token, user_agent='Medusa')
+
+ # Make a simple request to validate the token
+ gh.get_rate_limit()
+
+ return gh
+ except github.BadCredentialsException:
+ logger.warning('Invalid Github credentials. Please check your Github credentials in Medusa settings.')
+ except github.TwoFactorException:
+ logger.warning('Invalid Github token. Please check your Github token in Medusa settings.')
+ except github.GithubException as e:
+ logger.debug('Unable to contact Github: {ex!r}', ex=e)
+ raise
+
+
+def get_user(gh=None):
+ """Return the login of the authenticated Github user.
+
+ :param gh:
+ :type gh: Github
+ :return:
+ :rtype: string
+ """
+ try:
+ gh = gh or github.MainClass.Github(user_agent='Medusa')
+ return gh.get_user().login
+ except github.GithubException as e:
+ logger.debug('Unable to contact Github: {ex!r}', ex=e)
+ raise
+
+
 def get_github_repo(organization, repo, gh=None):
 """Return the github repository.
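[Editor's note: illustrative sketch, not part of the patch. It shows one way the two github_client helpers added above could be combined to turn a personal access token into the login it belongs to. The wrapper name resolve_git_login is hypothetical; only token_authenticate() and get_user() come from this diff.]

import github

from medusa import github_client


def resolve_git_login(token):
    """Return the Github login behind `token`, or None if it is unusable."""
    try:
        gh = github_client.token_authenticate(token)
        if gh is None:
            # Bad credentials and two-factor errors are logged inside
            # token_authenticate(), which then returns None (as it also
            # does for an empty token).
            return None
        # get_user() falls back to an anonymous client when gh is None,
        # so the authenticated client is passed in explicitly.
        return github_client.get_user(gh=gh)
    except github.GithubException:
        # Both helpers re-raise when Github itself cannot be contacted.
        return None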
diff --git a/medusa/helper/collections.py b/medusa/helper/collections.py new file mode 100644 index 0000000000..5da32e69ac --- /dev/null +++ b/medusa/helper/collections.py @@ -0,0 +1,11 @@ +# coding=utf-8 +"""Extended collections.""" + + +class NonEmptyDict(dict): + """Dictionary that only accept values that are not none and not empty strings.""" + + def __setitem__(self, key, value): + """Discard None values and empty strings.""" + if key in self or value is not None and value != '': + super(NonEmptyDict, self).__setitem__(key, value) diff --git a/medusa/helper/common.py b/medusa/helper/common.py index caacaf2cc8..ace00de4a5 100644 --- a/medusa/helper/common.py +++ b/medusa/helper/common.py @@ -1,20 +1,4 @@ # coding=utf-8 -# This file is part of Medusa. -# - -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . from __future__ import unicode_literals @@ -25,11 +9,13 @@ from fnmatch import fnmatch +from medusa import app + from six import PY3, text_type -from .. import app -logger = logging.getLogger(__name__) +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) if PY3: long = int @@ -309,8 +295,8 @@ def try_int(candidate, default_value=0): try: return int(candidate) except (ValueError, TypeError): - if candidate and ("," in candidate or "." in candidate): - logger.error(u"Failed parsing provider. Traceback: %r" % traceback.format_exc()) + if candidate and (',' in candidate or '.' in candidate): + log.error(u'Failed parsing provider. Traceback: %r', traceback.format_exc()) return default_value diff --git a/medusa/helper/encoding.py b/medusa/helper/encoding.py index d1dfcd84f8..f30bde0f92 100644 --- a/medusa/helper/encoding.py +++ b/medusa/helper/encoding.py @@ -1,24 +1,10 @@ -# Author: Nic Wolfe - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +# coding=utf-8 from chardet import detect + +from medusa import app + from six import text_type -from .. import app def ss(var): diff --git a/medusa/helper/exceptions.py b/medusa/helper/exceptions.py index 8fd59112c1..2e98b5027b 100644 --- a/medusa/helper/exceptions.py +++ b/medusa/helper/exceptions.py @@ -1,24 +1,8 @@ # coding=utf-8 -# This file is part of Medusa. -# - -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . -from six import text_type +from medusa.helper.encoding import ss -from .encoding import ss +from six import text_type def ex(e): diff --git a/medusa/helpers.py b/medusa/helpers/__init__.py similarity index 76% rename from medusa/helpers.py rename to medusa/helpers/__init__.py index 81b3da8378..42a35b8bff 100644 --- a/medusa/helpers.py +++ b/medusa/helpers/__init__.py @@ -1,21 +1,5 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . -# pylint:disable=too-many-lines + """Various helper methods.""" import base64 @@ -58,20 +42,22 @@ from imdbpie import imdbpie +from medusa import app, db +from medusa.common import USER_AGENT +from medusa.helper.common import episode_num, http_code_description, media_extensions, pretty_file_size, subtitle_extensions +from medusa.helper.exceptions import ex +from medusa.indexers.indexer_exceptions import IndexerException +from medusa.logger.adapters.style import BraceAdapter +from medusa.show.show import Show + import requests from requests.compat import urlparse from six import binary_type, string_types, text_type from six.moves import http_client -from . import app, db -from .common import USER_AGENT -from .helper.common import episode_num, http_code_description, media_extensions, pretty_file_size, subtitle_extensions -from .helper.exceptions import ex -from .indexers.indexer_exceptions import IndexerException -from .show.show import Show - -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) try: @@ -125,7 +111,8 @@ def is_media_file(filename): return sep_file[2].lower() in media_extensions except TypeError as error: # Not a string - logger.debug('Invalid filename. Filename must be a string. {error}', error=error) + log.debug(u'Invalid filename. Filename must be a string. {error}', + {'error': error}) return False @@ -189,7 +176,7 @@ def make_dir(path): os.makedirs(path) # do the library update for synoindex - from . import notifiers + from medusa import notifiers notifiers.synoindex_notifier.addFolder(path) except OSError: return False @@ -208,7 +195,7 @@ def search_indexer_for_show_id(show_name, indexer=None, indexer_id=None, ui=None :param ui: Custom UI for indexer use :return: """ - from .indexers.indexer_api import indexerApi + from medusa.indexers.indexer_api import indexerApi show_names = [re.sub('[. 
-]', ' ', show_name)] # Query Indexers for each search term and build the list of results @@ -221,7 +208,8 @@ def search_indexer_for_show_id(show_name, indexer=None, indexer_id=None, ui=None t = indexer_api.indexer(**indexer_api_params) for name in show_names: - logger.debug(u'Trying to find {name} on {api_name}', name=name, api_name=indexer_api.name) + log.debug(u'Trying to find {name} on {indexer}', + {'name': name, 'indexer': indexer_api.name}) try: search = t[indexer_id] if indexer_id else t[name] @@ -241,7 +229,8 @@ def search_indexer_for_show_id(show_name, indexer=None, indexer_id=None, ui=None if not (seriesname and series_id): continue show = Show.find(app.showList, int(series_id)) - # Check if we can find the show in our list (if not, it's not the right show) + # Check if we can find the show in our list + # if not, it's not the right show if (indexer_id is None) and (show is not None) and (show.indexerid == int(series_id)): return seriesname, i, int(series_id) elif (indexer_id is not None) and (int(indexer_id) == int(series_id)): @@ -295,12 +284,12 @@ def copy_file(src_file, dest_file): try: shutil.copyfile(src_file, dest_file) except (SpecialFileError, Error) as error: - logger.warning(u'{error}', error=error) + log.warning(error) except OSError as error: if 'No space left on device' in error: - logger.warning(u'{error}', error=error) + log.warning(error) else: - logger.error(u'{error}', error=error) + log.error(error) else: try: shutil.copymode(src_file, dest_file) @@ -352,13 +341,26 @@ def hardlink_file(src_file, dest_file): try: link(src_file, dest_file) fix_set_group_id(dest_file) - except OSError as e: - if hasattr(e, 'errno') and e.errno == 17: # File exists. Don't fallback to copy - logger.warning(u'Failed to create hardlink of {source} at {dest}. Error: {error!r}', - source=src_file, dest=dest_file, error=e) + except OSError as msg: + if hasattr(msg, 'errno') and msg.errno == 17: + # File exists. Don't fallback to copy + log.warning( + u'Failed to create hardlink of {source} at {destination}.' + u' Error: {error!r}', { + 'source': src_file, + 'destination': dest_file, + 'error': msg + } + ) else: - logger.warning(u'Failed to create hardlink of {source} at {dest}. Error: {error!r}. Copying instead', - source=src_file, dest=dest_file, error=e) + log.warning( + u'Failed to create hardlink of {source} at {destination}.' + u' Error: {error!r}. Copying instead', { + 'source': src_file, + 'dest': dest_file, + 'error': msg, + } + ) copy_file(src_file, dest_file) @@ -392,13 +394,26 @@ def move_and_symlink_file(src_file, dest_file): shutil.move(src_file, dest_file) fix_set_group_id(dest_file) symlink(dest_file, src_file) - except OSError as e: - if hasattr(e, 'errno') and e.errno == 17: # File exists. Don't fallback to copy - logger.warning(u'Failed to create symlink of {source} at {dest}. Error: {error!r}', - source=src_file, dest=dest_file, error=e) + except OSError as msg: + if hasattr(msg, 'errno') and msg.errno == 17: + # File exists. Don't fallback to copy + log.warning( + u'Failed to create symlink of {source} at {destination}.' + u' Error: {error!r}', { + 'source': src_file, + 'dest': dest_file, + 'error': msg, + } + ) else: - logger.warning(u'Failed to create symlink of {source} at {dest}. Error: {error!r}. Copying instead', - source=src_file, dest=dest_file, error=e) + log.warning( + u'Failed to create symlink of {source} at {destination}.' + u' Error: {error!r}. 
Copying instead', { + 'source': src_file, + 'dest': dest_file, + 'error': msg, + } + ) copy_file(src_file, dest_file) @@ -408,16 +423,18 @@ def make_dirs(path): :param path: :rtype path: str """ - logger.debug(u'Checking if the path {path} already exists', path=path) + log.debug(u'Checking if the path {path} already exists', {'path': path}) if not os.path.isdir(path): # Windows, create all missing folders if os.name == 'nt' or os.name == 'ce': try: - logger.debug(u"Folder {path} didn't exist, creating it", path=path) + log.debug(u"Folder {path} didn't exist, creating it", + {'path': path}) os.makedirs(path) - except (OSError, IOError) as e: - logger.error(u"Failed creating {path} : {error!r}", path=path, error=e) + except (OSError, IOError) as msg: + log.error(u"Failed creating {path} : {error!r}", + {'path': path, 'error': msg}) return False # not Windows, create all missing folders and set permissions @@ -434,16 +451,19 @@ def make_dirs(path): continue try: - logger.debug(u"Folder {path} didn't exist, creating it", path=sofar) + log.debug(u"Folder {path} didn't exist, creating it", + {'path': sofar}) os.mkdir(sofar) - # use normpath to remove end separator, otherwise checks permissions against itself + # use normpath to remove end separator, + # otherwise checks permissions against itself chmod_as_parent(os.path.normpath(sofar)) # do the library update for synoindex - from . import notifiers + from medusa import notifiers notifiers.synoindex_notifier.addFolder(sofar) - except (OSError, IOError) as e: - logger.error(u'Failed creating {path} : {error!r}', path=sofar, error=e) + except (OSError, IOError) as msg: + log.error(u'Failed creating {path} : {error!r}', + {'path': sofar, 'error': msg}) return False return True @@ -461,7 +481,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0): :param old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION :type old_path_length: int """ - from . import subtitles + from medusa import subtitles if old_path_length == 0 or old_path_length > len(cur_path): # approach from the right cur_file_name, cur_file_ext = os.path.splitext(cur_path) @@ -489,10 +509,12 @@ def rename_ep_file(cur_path, new_path, old_path_length=0): # move the file try: - logger.info(u"Renaming file from '{old}' to '{new}'", old=cur_path, new=new_path) + log.info(u"Renaming file from '{old}' to '{new}'", + {'old': cur_path, 'new': new_path}) shutil.move(cur_path, new_path) - except (OSError, IOError) as e: - logger.error(u"Failed renaming '{old}' to '{new}' : {error!r}", old=cur_path, new=new_path, error=e) + except (OSError, IOError) as msg: + log.error(u"Failed renaming '{old}' to '{new}' : {error!r}", + {'old': cur_path, 'new': new_path, 'error': msg}) return False # clean up any old folders that are empty @@ -512,7 +534,8 @@ def delete_empty_folders(top_dir, keep_dir=None): if not top_dir or not os.path.isdir(top_dir): return - logger.info(u'Trying to clean any empty folder under {path}'.format(path=top_dir)) + log.info(u'Trying to clean any empty folder under {path}', + {'path': top_dir}) for directory in os.walk(top_dir, topdown=False): dirpath = directory[0] @@ -522,16 +545,19 @@ def delete_empty_folders(top_dir, keep_dir=None): if dirpath != keep_dir and not os.listdir(dirpath): try: - logger.info(u'Deleting empty folder: {folder}'.format(folder=dirpath)) + log.info(u'Deleting empty folder: {folder}', + {'folder': dirpath}) os.rmdir(dirpath) # Do the library update for synoindex - from . 
import notifiers + from medusa import notifiers notifiers.synoindex_notifier.deleteFolder(dirpath) - except OSError as e: - logger.warning(u'Unable to delete {folder}. Error: {error!r}'.format(folder=dirpath, error=e)) + except OSError as msg: + log.warning(u'Unable to delete {folder}. Error: {error!r}', + {'folder': dirpath, 'error': msg}) else: - logger.debug(u'Not deleting {folder}. The folder is not empty or should be kept.'.format(folder=dirpath)) + log.debug(u'Not deleting {folder}. The folder is not empty' + u' or should be kept.', {'folder': dirpath}) def file_bit_filter(mode): @@ -560,7 +586,8 @@ def chmod_as_parent(child_path): parent_path = os.path.dirname(child_path) if not parent_path: - logger.debug(u'No parent path provided in {path}, unable to get permissions from it', path=child_path) + log.debug(u'No parent path provided in {path}, unable to get' + u' permissions from it', {'path': child_path}) return child_path = os.path.join(parent_path, os.path.basename(child_path)) @@ -586,15 +613,23 @@ def chmod_as_parent(child_path): user_id = os.geteuid() if user_id not in (0, child_path_owner): - logger.debug(u'Not running as root or owner of {path}, not trying to set permissions', path=child_path) + log.debug(u'Not running as root or owner of {path}, not trying to set' + u' permissions', {'path': child_path}) return try: os.chmod(child_path, child_mode) - logger.debug(u'Setting permissions for {path} to {mode} as parent directory has {parent_mode}', - path=child_path, mode=child_mode, parent_mode=parent_mode) + log.debug( + u'Setting permissions for {path} to {mode} as parent directory' + u' has {parent_mode}', { + 'path': child_path, + 'mode': child_mode, + 'parent_mode': parent_mode + } + ) except OSError: - logger.debug(u'Failed to set permission for {path} to {mode}', path=child_path, mode=child_mode) + log.debug(u'Failed to set permission for {path} to {mode}', + {'path': child_path, 'mode': child_mode}) def fix_set_group_id(child_path): @@ -626,16 +661,19 @@ def fix_set_group_id(child_path): user_id = os.geteuid() if user_id not in (0, child_path_owner): - logger.debug(u'Not running as root or owner of {path}, not trying to set the set-group-ID', path=child_path) + log.debug(u'Not running as root or owner of {path}, not trying to' + u' set the set-group-ID', {'path': child_path}) return try: os.chown(child_path, -1, parent_gid) - logger.debug(u'Respecting the set-group-ID bit on the parent directory for {path}', path=child_path) + log.debug(u'Respecting the set-group-ID bit on the parent' + u' directory for {path}', {'path': child_path}) except OSError: - logger.error( - u'Failed to respect the set-group-ID bit on the parent directory for {path} (setting group ID {gid})', - path=child_path, gid=parent_gid) + log.error( + u'Failed to respect the set-group-ID bit on the parent' + u' directory for {path} (setting group ID {gid})', + {'path': child_path, 'gid': parent_gid}) def is_anime_in_show_list(): @@ -662,7 +700,7 @@ def get_absolute_number_from_season_and_episode(show, season, episode): :param episode: Episode number :return: The absolute number """ - from . 
import db + from medusa import db absolute_number = None if season and episode: @@ -672,11 +710,16 @@ def get_absolute_number_from_season_and_episode(show, season, episode): if len(sql_results) == 1: absolute_number = int(sql_results[0][b'absolute_number']) - logger.debug(u'Found absolute number {absolute} for show {show} {ep}', - absolute=absolute_number, show=show.name, ep=episode_num(season, episode)) + log.debug( + u'Found absolute number {absolute} for show {show} {ep}', { + 'absolute': absolute_number, + 'show': show.name, + 'ep': episode_num(season, episode), + } + ) else: - logger.debug(u'No entries for absolute number for show {show} {ep}', - show=show.name, ep=episode_num(season, episode)) + log.debug(u'No entries for absolute number for show {show} {ep}', + {'show': show.name, 'ep': episode_num(season, episode)}) return absolute_number @@ -693,7 +736,8 @@ def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=Non ep = show.get_episode(None, None, absolute_number=absolute_number) if ep: episodes.append(ep.episode) - # this will always take the last found season so eps that cross the season border are not handeled well + # this will always take the last found season so eps that cross + # the season border are not handled well season = ep.season return season, episodes @@ -739,7 +783,8 @@ def create_https_certificates(ssl_cert, ssl_key): from OpenSSL import crypto from certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial except Exception: - logger.warning(u'pyopenssl module missing, please install for https access') + log.warning(u'pyopenssl module missing, please install for' + u' https access') return False # Create the CA Certificate @@ -757,7 +802,7 @@ def create_https_certificates(ssl_cert, ssl_key): io.open(ssl_key, 'wb').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) io.open(ssl_cert, 'wb').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) except Exception: - logger.error(u'Error creating SSL key and certificate') + log.error(u'Error creating SSL key and certificate') return False return True @@ -779,24 +824,27 @@ def backup_versioned_file(old_file, version): while not os.path.isfile(new_file): if not os.path.isfile(old_file): - logger.debug(u"Not creating backup, {old_file} doesn't exist", old_file=old_file) + log.debug(u"Not creating backup, {old_file} doesn't exist", + {'old_file': old_file}) break try: - logger.debug(u'Trying to back up {old} to new', old=old_file, new=new_file) + log.debug(u'Trying to back up {old} to new', + {'old': old_file, 'new': new_file}) shutil.copy(old_file, new_file) - logger.debug(u"Backup done") + log.debug(u"Backup done") break - except Exception as e: - logger.warning(u'Error while trying to back up {old} to {new} : {error!r}', - old=old_file, new=new_file, error=e) + except Exception as msg: + log.warning(u'Error while trying to back up {old} to {new}:' + u' {error!r}', + {'old': old_file, 'new': new_file, 'error': msg}) num_tries += 1 time.sleep(1) - logger.debug(u'Trying again.') + log.debug(u'Trying again.') if num_tries >= 10: - logger.error(u'Unable to back up {old} to {new} please do it manually.', - old=old_file, new=new_file) + log.error(u'Unable to back up {old} to {new}, please do it' + u' manually.', {'old': old_file, 'new': new_file}) return False return True @@ -821,36 +869,43 @@ def restore_versioned_file(backup_file, version): restore_file = backup_file if not os.path.isfile(new_file): - logger.debug(u"Not restoring, %s doesn't exist" % new_file) + 
log.debug(u"Not restoring, {file} doesn't exist", {'file': new_file}) return False try: - logger.debug(u"Trying to backup %s to %s.r%s before restoring backup" % (new_file, new_file, version)) + log.debug(u'Trying to backup {file} to {file}.r{version} before ' + u'restoring backup', {'file': new_file, 'version': version}) shutil.move(new_file, new_file + '.' + 'r' + str(version)) except Exception as e: - logger.warning(u"Error while trying to backup DB file %s before proceeding with restore: %r" % - (restore_file, ex(e))) + log.warning(u'Error while trying to backup DB file {name} before' + u' proceeding with restore: {error!r}', + {'name': restore_file, 'error': ex(e)}) return False while not os.path.isfile(new_file): if not os.path.isfile(restore_file): - logger.debug(u"Not restoring, %s doesn't exist" % restore_file) + log.debug(u'Not restoring, {file} does not exist', + {'file': restore_file}) break try: - logger.debug(u"Trying to restore file %s to %s" % (restore_file, new_file)) + log.debug(u'Trying to restore file {old} to {new}', + {'old': restore_file, 'new': new_file}) shutil.copy(restore_file, new_file) - logger.debug(u"Restore done") + log.debug(u"Restore done") break except Exception as e: - logger.warning(u"Error while trying to restore file %s. Error: %r" % (restore_file, ex(e))) + log.warning(u'Error while trying to restore file {name}.' + u' Error: {msg!r}', + {'name': restore_file, 'msg': ex(e)}) num_tries += 1 time.sleep(1) - logger.debug(u"Trying again. Attempt #: %s" % num_tries) + log.debug(u'Trying again. Attempt #: {0}', num_tries) if num_tries >= 10: - logger.warning(u"Unable to restore file %s to %s" % (restore_file, new_file)) + log.warning(u'Unable to restore file {old} to {new}', + {'old': restore_file, 'new': new_file}) return False return True @@ -937,7 +992,7 @@ def full_sanitize_scene_name(name): def get_show(name, try_indexers=False): - from . import classes, name_cache, scene_exceptions + from medusa import classes, name_cache, scene_exceptions if not app.showList: return @@ -968,8 +1023,9 @@ def get_show(name, try_indexers=False): # add show to cache if show and not from_cache: name_cache.addNameToCache(name, show.indexerid) - except Exception as e: - logger.debug(u"Error when attempting to find show: %s. Error: %r " % (name, repr(e))) + except Exception as msg: + log.debug(u'Error when attempting to find show: {name}.' + u' Error: {msg!r}', {'name': name, 'msg': msg}) return show @@ -1010,8 +1066,8 @@ def real_path(path): def validate_show(show, season=None, episode=None): """Reindex show from originating indexer, and return indexer information for the passed episode.""" - from .indexers.indexer_api import indexerApi - from .indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound + from medusa.indexers.indexer_api import indexerApi + from medusa.indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound indexer_lang = show.lang try: @@ -1034,21 +1090,22 @@ def validate_show(show, season=None, episode=None): def set_up_anidb_connection(): """Connect to anidb.""" if not app.USE_ANIDB: - logger.debug(u"Usage of anidb disabled. Skiping") + log.debug(u'Usage of anidb disabled. Skipping') return False if not app.ANIDB_USERNAME and not app.ANIDB_PASSWORD: - logger.debug(u"anidb username and/or password are not set. Aborting anidb lookup.") + log.debug(u'anidb username and/or password are not set.' 
+ u' Aborting anidb lookup.') return False if not app.ADBA_CONNECTION: def anidb_logger(msg): - return logger.debug(u"anidb: %s " % msg) + return log.debug(u'anidb: {0}', msg) try: app.ADBA_CONNECTION = adba.Connection(keepAlive=True, log=anidb_logger) - except Exception as e: - logger.warning(u"anidb exception msg: %r " % repr(e)) + except Exception as error: + log.warning(u'anidb exception msg: {0!r}', error) return False try: @@ -1056,8 +1113,8 @@ def anidb_logger(msg): app.ADBA_CONNECTION.auth(app.ANIDB_USERNAME, app.ANIDB_PASSWORD) else: return True - except Exception as e: - logger.warning(u"anidb exception msg: %r " % repr(e)) + except Exception as error: + log.warning(u'anidb exception msg: {0!r}', error) return False return app.ADBA_CONNECTION.authed() @@ -1077,8 +1134,8 @@ def backup_config_zip(file_list, archive, arcname=None): a.write(f, os.path.relpath(f, arcname)) a.close() return True - except Exception as e: - logger.error(u'Zip creation error: {error!r} ', error=e) + except Exception as error: + log.error(u'Zip creation error: {0!r} ', error) return False @@ -1104,8 +1161,8 @@ def path_leaf(path): zip_file.extract(member, target_dir) zip_file.close() return True - except Exception as e: - logger.error(u'Zip extraction error: {error!r}', error=e) + except Exception as error: + log.error(u'Zip extraction error: {0!r}', error) shutil.rmtree(target_dir) return False @@ -1151,7 +1208,7 @@ def request_defaults(kwargs): # request session proxies if app.PROXY_SETTING: - logger.debug(u"Using global proxy: " + app.PROXY_SETTING) + log.debug(u'Using global proxy: {0}', app.PROXY_SETTING) scheme, address = splittype(app.PROXY_SETTING) address = app.PROXY_SETTING if scheme else 'http://' + app.PROXY_SETTING proxies = { @@ -1165,7 +1222,7 @@ def request_defaults(kwargs): def prepare_cf_req(session, request): - logger.debug(u'CloudFlare protection detected, trying to bypass it.') + log.debug(u'CloudFlare protection detected, trying to bypass it.') try: tokens, user_agent = cfscrape.get_tokens(request.url) @@ -1177,10 +1234,11 @@ def prepare_cf_req(session, request): request.headers.update({u'User-Agent': user_agent}) else: request.headers = {u'User-Agent': user_agent} - logger.debug(u'CloudFlare protection successfully bypassed.') + log.debug(u'CloudFlare protection successfully bypassed.') return session.prepare_request(request) except (ValueError, AttributeError) as error: - logger.warning(u"Couldn't bypass CloudFlare's anti-bot protection. Error: {err_msg}", err_msg=error) + log.warning(u'Could not bypass CloudFlare anti-bot protection.' + u' Error: {0}', error) def get_url(url, post_data=None, params=None, headers=None, timeout=30, session=None, **kwargs): @@ -1207,28 +1265,38 @@ def get_url(url, post_data=None, params=None, headers=None, timeout=30, session= if cf_resp.ok: return cf_resp - logger.debug(u'Requested url {url} returned status code {status}: {desc}'.format - (url=resp.url, status=resp.status_code, desc=http_code_description(resp.status_code))) + log.debug( + u'Requested url {url} returned status code {status}:' + u' {description}', { + 'url': resp.url, + 'status': resp.status_code, + 'description': http_code_description(resp.status_code), + } + ) if response_type and response_type != u'response': return None - except requests.exceptions.RequestException as e: - logger.debug(u'Error requesting url {url}. Error: {err_msg}', url=url, err_msg=e) + except (requests.exceptions.RequestException, socket.gaierror) as error: + log.debug(u'Error requesting url {url}. 
Error: {msg}', + {'url': url, 'msg': error}) return None - except Exception as e: - if u'ECONNRESET' in e or (hasattr(e, u'errno') and e.errno == errno.ECONNRESET): - logger.warning(u'Connection reset by peer accessing url {url}. Error: {err_msg}'.format(url=url, err_msg=e)) + except Exception as error: + if u'ECONNRESET' in error or (hasattr(error, u'errno') and error.errno == errno.ECONNRESET): + log.warning(u'Connection reset by peer accessing url {url}.' + u' Error: {msg}', {'url': url, 'msg': error}) else: - logger.info(u'Unknown exception in url {url}. Error: {err_msg}', url=url, err_msg=e) - logger.debug(traceback.format_exc()) + log.info(u'Unknown exception in url {url}.' + u' Error: {msg}', {'url': url, 'msg': error}) + log.debug(traceback.format_exc()) return None if not response_type or response_type == u'response': return resp else: - warnings.warn(u'Returning {0} instead of {1} will be deprecated in the near future!'.format - (response_type, 'response'), PendingDeprecationWarning) + warnings.warn(u'Returning {0} instead of {1} will be deprecated in the' + u' near future!'.format(response_type, 'response'), + PendingDeprecationWarning) if response_type == u'json': try: return resp.json() @@ -1255,8 +1323,14 @@ def download_file(url, filename, session=None, headers=None, **kwargs): hooks=hooks, proxies=proxies)) as resp: if not resp.ok: - logger.debug(u'Requested download URL {url} returned status code is {code}: {description}'.format - (url=url, code=resp.status_code, description=http_code_description(resp.status_code))) + log.debug( + u'Requested download URL {url} returned' + u' status code {code}: {description}', { + 'url': url, + 'code': resp.status_code, + 'description': http_code_description(resp.status_code), + } + ) return False try: @@ -1267,25 +1341,33 @@ def download_file(url, filename, session=None, headers=None, **kwargs): fp.flush() chmod_as_parent(filename) - except OSError as e: + except OSError as msg: remove_file_failed(filename) - logger.warning(u'Problem setting permissions or writing file to: {location}. Error: {error}'.format - (location=filename, error=e)) + log.warning( + u'Problem setting permissions or writing file' + u' to: {location}. Error: {msg}', { + 'location': filename, + 'msg': msg, + } + ) return False - except requests.exceptions.RequestException as e: + except requests.exceptions.RequestException as msg: remove_file_failed(filename) - logger.warning(u'Error requesting download URL: {url}. Error: {error}'.format(url=url, error=e)) + log.warning(u'Error requesting download URL: {url}. Error: {error}', + {'url': url, 'error': msg}) return False - except EnvironmentError as e: + except EnvironmentError as msg: remove_file_failed(filename) - logger.warning(u'Unable to save the file: {name}. Error: {error}'.format(name=filename, error=e)) + log.warning(u'Unable to save the file: {name}. Error: {error}', + {'name': filename, 'error': msg}) return False - except Exception as e: + except Exception as msg: remove_file_failed(filename) - logger.error(u'Unknown exception while downloading file {name} from URL: {url}. Error: {error}'.format - (name=filename, url=url, error=e)) - logger.debug(traceback.format_exc()) + log.error(u'Unknown exception while downloading file {name}' + u' from URL: {url}. 
Error: {error}', + {'name': filename, 'url': url, 'error': msg}) + log.debug(traceback.format_exc()) return False return True @@ -1297,19 +1379,24 @@ def handle_requests_exception(requests_exception): raise requests_exception except requests.exceptions.SSLError as error: if ssl.OPENSSL_VERSION_INFO < (1, 0, 1, 5): - logger.info("SSL Error requesting url: '{0}' You have {1}, try upgrading OpenSSL to 1.0.1e+".format( - error.request.url, ssl.OPENSSL_VERSION)) + log.info( + u'SSL Error requesting: {url} You have {version},' + u' try upgrading OpenSSL to 1.0.1e+', + {'url': error.request.url, 'version': ssl.OPENSSL_VERSION}) if app.SSL_VERIFY: - logger.info( - "SSL Error requesting url: '{0}'. Disable Cert Verification on the advanced tab of /config/general") - logger.debug(default.format(error)) - logger.debug(traceback.format_exc()) + log.info( + u'SSL Error requesting url: {url}. Disable Cert Verification' + u' on the advanced tab of /config/general', + {'url': error.request.url} + ) + log.debug(default.format(error)) + log.debug(traceback.format_exc()) except requests.exceptions.RequestException as error: - logger.info(default.format(error)) + log.info(default.format(error)) except Exception as error: - logger.error(default.format(error)) - logger.debug(traceback.format_exc()) + log.error(default.format(error)) + log.debug(traceback.format_exc()) def get_size(start_path='.'): @@ -1327,15 +1414,16 @@ def get_size(start_path='.'): fp = os.path.join(dirpath, f) try: total_size += os.path.getsize(fp) - except OSError as e: - logger.error(u"Unable to get size for file %s Error: %r" % (fp, ex(e))) - logger.debug(traceback.format_exc()) + except OSError as error: + log.error(u'Unable to get size for file {name} Error: {msg!r}', + {'name': fp, 'msg': ex(error)}) + log.debug(traceback.format_exc()) return total_size def generate_api_key(): """Return a new randomized API_KEY.""" - logger.info(u"Generating New API key") + log.info(u'Generating New API key') secure_hash = hashlib.sha512(str(time.time())) secure_hash.update(str(random.SystemRandom().getrandbits(4096))) return secure_hash.hexdigest()[:32] @@ -1363,25 +1451,28 @@ def verify_freespace(src, dest, oldfile=None): if not isinstance(oldfile, list): oldfile = [oldfile] - logger.debug(u"Trying to determine free space on destination drive") + log.debug(u'Trying to determine free space on destination drive') if not os.path.isfile(src): - logger.warning("A path to a file is required for the source. {0} is not a file.".format(src)) + log.warning(u'A path to a file is required for the source.' + u' {source} is not a file.', {'source': src}) return True try: diskfree = get_disk_space_usage(dest, None) if not diskfree: - logger.warning(u"Unable to determine the free space on your OS.") + log.warning(u'Unable to determine the free space on your OS.') return True except Exception: - logger.warning(u"Unable to determine free space, so I will assume there is enough.") + log.warning(u'Unable to determine free space, assuming there is ' + u'enough.') return True try: neededspace = os.path.getsize(src) - except OSError as e: - logger.warning(u'Unable to determine needed space. Aborting. Error: {error_msg}'.format(error_msg=e)) + except OSError as error: + log.warning(u'Unable to determine needed space. Aborting.' + u' Error: {msg}', {'msg': error}) return False if oldfile: @@ -1392,8 +1483,13 @@ def verify_freespace(src, dest, oldfile=None): if diskfree > neededspace: return True else: - logger.warning(u"Not enough free space. 
Needed: {0} bytes ({1}), found: {2} bytes ({3})".format - (neededspace, pretty_file_size(neededspace), diskfree, pretty_file_size(diskfree))) + log.warning( + u'Not enough free space.' + u' Needed: {0} bytes ({1}),' + u' found: {2} bytes ({3})', + neededspace, pretty_file_size(neededspace), + diskfree, pretty_file_size(diskfree) + ) return False @@ -1513,7 +1609,8 @@ def get_tvdb_from_id(indexer_id, indexer): tvdb_id = data['externals']['thetvdb'] return tvdb_id - # If indexer is IMDB and we've still not returned a tvdb_id, let's try to use tvmaze's api, to get the tvdbid + # If indexer is IMDB and we've still not returned a tvdb_id, + # let's try to use tvmaze's api, to get the tvdbid if indexer == 'IMDB': url = 'http://api.tvmaze.com/lookup/shows?imdb={indexer_id}'.format(indexer_id=indexer_id) data = get_url(url, session=session, returns='json') @@ -1526,25 +1623,30 @@ def get_tvdb_from_id(indexer_id, indexer): def get_showname_from_indexer(indexer, indexer_id, lang='en'): - from .indexers.indexer_api import indexerApi + from medusa.indexers.indexer_api import indexerApi indexer_api_params = indexerApi(indexer).api_params.copy() if lang: indexer_api_params['language'] = lang - logger.info(u"{indexer_name}:{params!r}", indexer_name=indexerApi(indexer).name, params=indexer_api_params) + log.info(u'{0}: {1!r}', indexerApi(indexer).name, indexer_api_params) + s = None try: - t = indexerApi(indexer).indexer(**indexer_api_params) - s = t[int(indexer_id)] - except IndexerException as e: - logger.warning("Can't get showname for indexer {indexer_name} and indexer_id {indexer_id} in language {lang} " - "with cause: {cause}", - indexer_name=indexerApi(indexer).name, indexer_id=indexer_id, lang=lang, cause=e) - - if hasattr(s, 'data'): - return s.data.get('seriesname') - - return None + indexer_api = indexerApi(indexer).indexer(**indexer_api_params) + s = indexer_api[int(indexer_id)] + except IndexerException as msg: + log.warning( + 'Show name unavailable for {name} id {id} in {language}:' + ' {reason}', { + 'name': indexerApi(indexer).name, + 'id': indexer_id, + 'language': lang, + 'reason': msg, + } + ) + + data = getattr(s, 'data', {}) + return data.get('seriesname') # http://stackoverflow.com/a/20380514 @@ -1588,8 +1690,9 @@ def remove_folder(folder_path, level=logging.WARNING): if os.path.exists(folder_path): try: shutil.rmtree(folder_path) - except OSError as e: - logger.log(level, u'Unable to remove directory {folder}: {cause!r}', folder=folder_path, cause=e) + except OSError as error: + log.log(level, u'Unable to remove directory {folder}: {reason!r}', + {'folder': folder_path, 'reason': error}) def is_ip_private(ip): @@ -1679,7 +1782,7 @@ def get_broken_providers(): # Check if last broken providers update happened less than 60 minutes ago if app.BROKEN_PROVIDERS_UPDATE and isinstance(app.BROKEN_PROVIDERS_UPDATE, datetime.datetime) and \ (datetime.datetime.now() - app.BROKEN_PROVIDERS_UPDATE).seconds < 3600: - logger.debug('Broken providers already updated in the last hour') + log.debug('Broken providers already updated in the last hour') return # Update last broken providers update-timestamp to avoid updating again in less than 60 minutes @@ -1688,12 +1791,13 @@ def get_broken_providers(): url = '{base_url}/providers/broken_providers.json'.format(base_url=app.BASE_PYMEDUSA_URL) response = get_url(url, session=make_session(), returns='json') if response is None: - logger.warning('Unable to update the list with broken providers. ' - 'This list is used to disable broken providers. 
' - 'You may encounter errors in the logfiles if you are using a broken provider.') + log.warning('Unable to update the list with broken providers.' + ' This list is used to disable broken providers.' + ' You may encounter errors in the log files if you are' + ' using a broken provider.') return [] - logger.info('Broken providers found: {0}'.format(response)) + log.info('Broken providers found: {0}', response) return ','.join(response) @@ -1758,3 +1862,13 @@ def title_to_imdb(title, start_year, imdb_api=None): # Return the most relevant result (can be erroneous) if title_matches: return title_matches[0] + + +def get_title_without_year(title, title_year): + """Get title without year.""" + if not title_year: + return title + year = ' ({year})'.format(year=title_year) + if year in title: + title = title.replace(year, '') + return title diff --git a/medusa/helper/externals.py b/medusa/helpers/externals.py similarity index 69% rename from medusa/helper/externals.py rename to medusa/helpers/externals.py index 427de7c636..d026e0275c 100644 --- a/medusa/helper/externals.py +++ b/medusa/helpers/externals.py @@ -1,32 +1,19 @@ # coding=utf-8 -# Author: p0psicles -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . -# pylint:disable=too-many-lines + """Externals helper functions.""" import logging -from traktor import TokenExpiredException, TraktApi, TraktException -from .. import app -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_config import indexerConfig -from ..indexers.indexer_exceptions import IndexerException, IndexerShowAllreadyInLibrary, IndexerUnavailable +from medusa import app +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_config import indexerConfig +from medusa.indexers.indexer_exceptions import IndexerException, IndexerShowAllreadyInLibrary, IndexerUnavailable +from medusa.logger.adapters.style import BraceAdapter + +from traktor import AuthException, TokenExpiredException, TraktApi, TraktException -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) def get_trakt_externals(externals): @@ -34,19 +21,20 @@ def get_trakt_externals(externals): :param externals: Dictionary of key/value pairs with external id's. """ - def trakt_request(api, url): + def trakt_request(api, trakt_url): """Perform the request and handle possible token refresh.""" try: - result = api.request(url) or [] + trakt_result = api.request(trakt_url) or [] if api.access_token_refreshed: app.TRAKT_ACCESS_TOKEN = api.access_token app.TRAKT_REFRESH_TOKEN = api.refresh_token app.instance.save_config() - except (TokenExpiredException, TraktException) as e: - logger.info(u'Could not use Trakt to enrich with externals. 
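# Illustrative sketch (not part of the patch): the get_title_without_year()
# helper added to medusa/helpers above only strips a trailing " (YYYY)" when
# the year is known. A standalone copy of that logic with a usage check; the
# titles below are made-up examples.
def get_title_without_year(title, title_year):
    """Get title without year."""
    if not title_year:
        return title
    year = ' ({year})'.format(year=title_year)
    if year in title:
        title = title.replace(year, '')
    return title


assert get_title_without_year('Example Show (2016)', 2016) == 'Example Show'
assert get_title_without_year('Example Show', None) == 'Example Show'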
Cause: {cause}', cause=e) + except (AuthException, TraktException, TokenExpiredException) as e: + log.info(u'Could not use Trakt to enrich with externals: {0}', + e.message or e) return [] else: - return result + return trakt_result trakt_settings = {'trakt_api_key': app.TRAKT_API_KEY, 'trakt_api_secret': app.TRAKT_API_SECRET, @@ -63,8 +51,12 @@ def trakt_request(api, url): continue url = id_lookup.format(external_key=trakt_mapping[external_key], external_value=externals[external_key]) - logger.debug(u"Looking for externals using Trakt and {indexer_name}'s {id}", - indexer_name=trakt_mapping[external_key], id=externals[external_key]) + log.debug( + u'Looking for externals using Trakt and {indexer} id {number}', { + 'indexer': trakt_mapping[external_key], + 'number': externals[external_key], + } + ) result = trakt_request(trakt_api, url) if result and len(result) and result[0].get('show') and result[0]['show'].get('ids'): ids = {trakt_mapping_rev[k]: v for k, v in result[0]['show'].get('ids').items() @@ -105,17 +97,21 @@ def get_externals(show=None, indexer=None, indexed_show=None): except IndexerUnavailable: continue if hasattr(t, 'get_id_by_external'): - logger.debug(u"Trying other indexers {indexer_name} get_id_by_external", - indexer_name=indexerApi(other_indexer).name) - # Call the get_id_by_external and pass all the externals we have, except for the indexers own. + log.debug(u"Trying other indexer: {indexer} get_id_by_external", + {'indexer': indexerApi(other_indexer).name}) + # Call the get_id_by_external and pass all the externals we have, + # except for the indexers own. try: new_show_externals.update(t.get_id_by_external(**new_show_externals)) - except IndexerException as e: - logger.warning(u'Error getting external ids for other indexer {indexer_name} with cause {cause}', - indexer_name=indexerApi(show.indexer).name, cause=e) + except IndexerException as error: + log.warning( + u'Error getting external ids for other' + u' indexer {name}: {reason}', + {'name': indexerApi(show.indexer).name, 'reason': error.message}) # Try to update with the Trakt externals. - new_show_externals.update(get_trakt_externals(new_show_externals)) + if app.USE_TRAKT: + new_show_externals.update(get_trakt_externals(new_show_externals)) return new_show_externals @@ -138,10 +134,11 @@ def check_existing_shows(indexed_show, indexer): # Or one of it's externals. for show in app.showList: - # Check if the new shows indexer id matches the external for the show in library + # Check if the new shows indexer id matches the external for the show + # in library if show.externals.get(mappings[indexer]) and indexed_show['id'] == show.externals.get(mappings[indexer]): - logger.debug(u"The Show {show_name} was already added. Found it because the show's added id ({id})," - u" is already known it the db", show_name=show.name, id=indexed_show['id']) + log.debug(u'Show already in database. [{id}] {name}', + {'name': show.name, 'id': indexed_show['id']}) raise IndexerShowAllreadyInLibrary('The show {0} has already been added by the indexer {1}. ' 'Please remove the show, before you can add it through {2}.' .format(show.name, indexerApi(show.indexer).name, @@ -151,15 +148,20 @@ def check_existing_shows(indexed_show, indexer): if show.indexer not in other_indexers: continue - # Check if one of the new shows externals matches one of the externals for the show in library. + # Check if one of the new shows externals matches one of the + # externals for the show in library. 
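# Hedged sketch of the id-remapping step in get_trakt_externals() above: Trakt
# returns ids keyed by its own names, and the reverse mapping flips them into
# Medusa's externals keys while dropping empty or unknown entries. The mapping
# and id values here are illustrative, not copied from the module.
trakt_mapping_rev = {'tvdb': 'tvdb_id', 'tvrage': 'tvrage_id', 'imdb': 'imdb_id'}
trakt_ids = {'tvdb': 281662, 'tvrage': None, 'imdb': 'tt0000000', 'slug': 'example-show'}

externals = {trakt_mapping_rev[k]: v for k, v in trakt_ids.items()
             if v and trakt_mapping_rev.get(k)}
# -> {'tvdb_id': 281662, 'imdb_id': 'tt0000000'}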
if not new_show_externals.get(new_show_external_key) or not show.externals.get(new_show_external_key): continue if new_show_externals.get(new_show_external_key) == show.externals.get(new_show_external_key): - logger.debug(u"The Show {show_name} was already added. Found it because one of show's externals " - u'({external_id}) matches one of an existing shows external with value ({id})', - show_name=show.name, external_id=new_show_external_key, - id=show.externals.get(new_show_external_key)) + log.debug( + u'Show already in database under external ID ({existing})' + u' for ({id}) {name}', { + 'name': show.name, + 'id': show.externals.get(new_show_external_key), + 'existing': new_show_external_key, + } + ) raise IndexerShowAllreadyInLibrary('The show {0} has already been added by the indexer {1}. ' 'Please remove the show, before you can add it through {2}.' .format(show.name, indexerApi(show.indexer).name, diff --git a/medusa/helpers/quality.py b/medusa/helpers/quality.py new file mode 100644 index 0000000000..aac3fe3225 --- /dev/null +++ b/medusa/helpers/quality.py @@ -0,0 +1,18 @@ +# coding=utf-8 + +from medusa.common import Quality, qualityPresetStrings + + +def get_quality_string(quality): + """ + :param quality: The quality to convert into a string + :return: The string representation of the provided quality + """ + + if quality in qualityPresetStrings: + return qualityPresetStrings[quality] + + if quality in Quality.qualityStrings: + return Quality.qualityStrings[quality] + + return 'Custom' diff --git a/medusa/history.py b/medusa/history.py index 8a6a8a0534..0981ab2635 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -26,7 +26,7 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, - provider, version=-1, proper_tags='', manually_searched=False, info_hash=None): + provider, version=-1, proper_tags='', manually_searched=False, info_hash=None, size=-1): """ Insert a history item in DB @@ -46,10 +46,10 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, main_db_con.action( "INSERT INTO history " "(action, date, showid, season, episode, quality, " - "resource, provider, version, proper_tags, manually_searched, info_hash) " - "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", + "resource, provider, version, proper_tags, manually_searched, info_hash, size) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", [action, logDate, showid, season, episode, quality, - resource, provider, version, proper_tags, manually_searched, info_hash]) + resource, provider, version, proper_tags, manually_searched, info_hash, size]) def log_snatch(searchResult): @@ -68,6 +68,7 @@ def log_snatch(searchResult): proper_tags = '|'.join(searchResult.proper_tags) manually_searched = searchResult.manually_searched info_hash = searchResult.hash.lower() if searchResult.hash else None + size = searchResult.size providerClass = searchResult.provider if providerClass is not None: @@ -80,10 +81,10 @@ def log_snatch(searchResult): resource = searchResult.name _logHistoryItem(action, showid, season, episode, quality, resource, - provider, version, proper_tags, manually_searched, info_hash) + provider, version, proper_tags, manually_searched, info_hash, size) -def logDownload(episode, filename, new_ep_quality, release_group=None, version=-1): +def log_download(episode, filename, new_ep_quality, release_group=None, version=-1): """ Log history of download @@ -96,6 +97,7 @@ def logDownload(episode, filename, new_ep_quality, release_group=None, version=- showid = int(episode.show.indexerid) season = 
int(episode.season) ep_number = int(episode.episode) + size = int(episode.file_size) quality = new_ep_quality @@ -107,7 +109,7 @@ def logDownload(episode, filename, new_ep_quality, release_group=None, version=- action = episode.status - _logHistoryItem(action, showid, season, ep_number, quality, filename, provider, version) + _logHistoryItem(action, showid, season, ep_number, quality, filename, provider, version, size=size) def logSubtitle(showid, season, episode, status, subtitle_result): diff --git a/medusa/image_cache.py b/medusa/image_cache.py index 350ed76183..3a76d35737 100644 --- a/medusa/image_cache.py +++ b/medusa/image_cache.py @@ -39,7 +39,8 @@ def __init__(self): def __del__(self): pass - def _cache_dir(self): + @classmethod + def _cache_dir(cls): """Build up the full path to the image cache directory.""" return os.path.abspath(os.path.join(app.CACHE_DIR, 'images')) @@ -47,7 +48,8 @@ def _thumbnails_dir(self): """Build up the full path to the thumbnails image cache directory.""" return os.path.abspath(os.path.join(self._cache_dir(), 'thumbnails')) - def poster_path(self, indexer_id): + @classmethod + def poster_path(cls, indexer_id): """ Build up the path to a poster cache for a given Indexer ID. @@ -55,9 +57,10 @@ def poster_path(self, indexer_id): :return: a full path to the cached poster file for the given Indexer ID """ poster_file_name = '{0}.poster.jpg'.format(indexer_id) - return os.path.join(self._cache_dir(), poster_file_name) + return os.path.join(cls._cache_dir(), poster_file_name) - def banner_path(self, indexer_id): + @classmethod + def banner_path(cls, indexer_id): """ Build up the path to a banner cache for a given Indexer ID. @@ -65,7 +68,7 @@ def banner_path(self, indexer_id): :return: a full path to the cached banner file for the given Indexer ID """ banner_file_name = '{0}.banner.jpg'.format(indexer_id) - return os.path.join(self._cache_dir(), banner_file_name) + return os.path.join(cls._cache_dir(), banner_file_name) def fanart_path(self, indexer_id): """ diff --git a/medusa/imdb_popular.py b/medusa/imdb_popular.py deleted file mode 100644 index 5c32f8338b..0000000000 --- a/medusa/imdb_popular.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding=utf-8 -import os -import posixpath -import re -from datetime import date - -from bs4 import BeautifulSoup -from . import app, helpers - - -class ImdbPopular(object): - """This class contains everything for the IMDB popular page.""" - - def __init__(self): - """Constructor for ImdbPopular.""" - # Use akas.imdb.com, just like the imdb lib. 
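# Rough sketch of why poster_path()/banner_path() in image_cache.py above are
# turned into classmethods: the paths depend only on the cache root and the
# indexer id, so callers can build them without constructing an ImageCache
# instance. The cache root below is a stand-in, not Medusa's real app.CACHE_DIR.
import os


class ImageCacheSketch(object):
    @classmethod
    def _cache_dir(cls):
        return os.path.abspath(os.path.join('/tmp/medusa-cache', 'images'))

    @classmethod
    def poster_path(cls, indexer_id):
        return os.path.join(cls._cache_dir(), '{0}.poster.jpg'.format(indexer_id))


print(ImageCacheSketch.poster_path(1234))  # no instance needed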
- self.url = 'http://akas.imdb.com/search/title' - - self.params = { - 'at': 0, - 'sort': 'moviemeter', - 'title_type': 'tv_series', - 'year': '%s,%s' % (date.today().year - 1, date.today().year + 1), - } - - self.session = helpers.make_session() - - def fetch_popular_shows(self): - """Get popular show information from IMDB.""" - popular_shows = [] - - response = helpers.get_url(self.url, session=self.session, params=self.params, - headers={'Referer': 'http://akas.imdb.com/'}, returns='response') - if not response or not response.text: - return None - - soup = BeautifulSoup(response.text, 'html5lib') - results = soup.find('div', class_='lister-list') - rows = results.find_all('div', class_='lister-item mode-advanced') - - for row in rows: - show = {} - - image_div = row.find('div', class_='lister-item-image float-left') - if image_div: - image = image_div.find('img') - show['image_url_large'] = self.change_size(image['loadlate']) - show['image_path'] = posixpath.join('images', 'imdb_popular', os.path.basename(show['image_url_large'])) - self.cache_image(show['image_url_large']) - - content_div = row.find('div', class_='lister-item-content') - if content_div: - show_info = content_div.find('a') - show['name'] = show_info.get_text() - show['imdb_url'] = 'http://www.imdb.com' + show_info['href'] - show['imdb_tt'] = row.find('div', class_='ribbonize')['data-tconst'] - show['year'] = content_div.find('span', class_='lister-item-year text-muted unbold').get_text()[1:5] - - rating_div = content_div.find('div', class_='ratings-bar') - if rating_div: - rating_strong = rating_div.find('strong') - if rating_strong: - show['rating'] = rating_strong.get_text() - - votes_p = content_div.find('p', class_='sort-num_votes-visible') - if votes_p: - show['votes'] = votes_p.find('span', {'name': 'nv'}).get_text().replace(',', '') - - text_p = content_div.find('p', class_='text-muted') - if text_p: - show['outline'] = text_p.get_text(strip=True) - - popular_shows.append(show) - - return popular_shows - - @staticmethod - def change_size(image_url, factor=3): - """ - Change the size of the image we get from IMDB. - - :param: image_url: Image source URL - :param: factor: Multiplier for the image size - """ - match = re.search('(.+[X|Y])(\d+)(_CR\d+,\d+,)(\d+),(\d+)', image_url) - - if match: - matches = list(match.groups()) - matches[1] = int(matches[1]) * factor - matches[3] = int(matches[3]) * factor - matches[4] = int(matches[4]) * factor - - return '{0}{1}{2}{3},{4}_AL_.jpg'.format(matches[0], matches[1], matches[2], - matches[3], matches[4]) - else: - return image_url - - def cache_image(self, image_url): - """ - Store cache of image in cache dir. - - :param image_url: Image source URL - """ - path = os.path.abspath(os.path.join(app.CACHE_DIR, 'images', 'imdb_popular')) - - if not os.path.exists(path): - os.makedirs(path) - - full_path = os.path.join(path, os.path.basename(image_url)) - - if not os.path.isfile(full_path): - helpers.download_file(image_url, full_path, session=self.session) - - -imdb_popular = ImdbPopular() diff --git a/medusa/indexers/indexer_base.py b/medusa/indexers/indexer_base.py index 7a415aed98..3bfc66133f 100644 --- a/medusa/indexers/indexer_base.py +++ b/medusa/indexers/indexer_base.py @@ -407,8 +407,8 @@ def __getitem__(self, episode_number): def search(self, term=None, key=None): """Search all episodes in season, returns a list of matching Episode instances. 
- >>> t = Tvdb() - >>> t['scrubs'][1].search('first day') + >>> indexer_api = Tvdb() + >>> indexer_api['scrubs'][1].search('first day') [] >>> diff --git a/medusa/indexers/indexer_config.py b/medusa/indexers/indexer_config.py index 98d3f9ff1f..f6cb1eddc9 100644 --- a/medusa/indexers/indexer_config.py +++ b/medusa/indexers/indexer_config.py @@ -16,6 +16,8 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . +import re + from .tmdb.tmdb import Tmdb from .tvdbv2.tvdbv2_api import TVDBv2 from .tvmaze.tvmaze_api import TVmaze @@ -41,8 +43,13 @@ INDEXER_TMDB = 4 EXTERNAL_IMDB = 10 EXTERNAL_ANIDB = 11 +EXTERNAL_TRAKT = 12 + +EXTERNAL_MAPPINGS = {EXTERNAL_IMDB: 'imdb_id', EXTERNAL_ANIDB: 'anidb_id', + INDEXER_TVRAGE: 'tvrage_id', EXTERNAL_TRAKT: 'trakt_id'} -EXTERNAL_MAPPINGS = {EXTERNAL_IMDB: 'imdb_id', EXTERNAL_ANIDB: 'anidb_id', INDEXER_TVRAGE: 'tvrage_id'} +# trakt indexer name vs Medusa indexer +TRAKT_INDEXERS = {'tvdb': INDEXER_TVDBV2, 'tmdb': INDEXER_TMDB, 'imdb': EXTERNAL_IMDB, 'trakt': EXTERNAL_TRAKT} indexerConfig = { INDEXER_TVDBV2: { @@ -55,7 +62,6 @@ 'use_zip': True, 'session': make_session(cache_etags=False), }, - 'trakt_id': 'tvdb_id', 'xem_origin': 'tvdb', 'icon': 'thetvdb16.png', 'scene_loc': '{base_url}/scene_exceptions/scene_exceptions_tvdb.json'.format(base_url=BASE_PYMEDUSA_URL), @@ -74,7 +80,6 @@ 'use_zip': True, 'session': make_session(cache_etags=False), }, - 'trakt_id': 'tvdb_id', 'xem_mapped_to': INDEXER_TVDBV2, 'icon': 'tvmaze16.png', 'scene_loc': '{base_url}/scene_exceptions/scene_exceptions_tvmaze.json'.format(base_url=BASE_PYMEDUSA_URL), @@ -93,7 +98,6 @@ 'use_zip': True, 'session': make_session(cache_etags=False), }, - 'trakt_id': 'tvdb_id', 'icon': 'tmdb16.png', 'scene_loc': '{base_url}/scene_exceptions/scene_exceptions_tmdb.json'.format(base_url=BASE_PYMEDUSA_URL), 'base_url': 'https://www.themoviedb.org', @@ -110,3 +114,52 @@ # For example: {'tvdb_id': 1, 'tvmaze_id': 3, 'tmdb_id': 4} reverse_mappings = {indexerConfig[indexer]['mapped_to']: indexer for indexer in indexerConfig} reverse_mappings.update({v: k for k, v in EXTERNAL_MAPPINGS.items()}) + + +def indexer_name_to_id(indexer_name): + """Reverse translate the indexer identifier to it's id. + + :param indexer_name: Identifier of the indexer. Example: will return 1 for 'tvdb'. + :return: The indexer id. + """ + return {v['identifier']: k for k, v in indexerConfig.items()}.get(indexer_name) + + +def indexer_id_to_name(indexer): + """Reverse translate the indexer identifier to it's id. + + :param indexer: Indexer id. E.g.: 1. + :return: The indexer name. E.g.: tvdb + """ + return indexerConfig[indexer]['identifier'] + + +def indexer_id_to_slug(indexer, indexer_id): + """A utility function to translate a shows indexex and indexer id to a slug. + + :param indexer: The indexer id. For example 1 for tvdb and 3 for tvmaze. + :param indexer_id: The shows id, for the specific indexer. + :return: A slug. For example tvdb1234 for indexer 1 and indexer id 1234. + """ + return '{name}{indexer_id}'.format(name=indexerConfig[indexer]['identifier'], indexer_id=indexer_id) + + +def slug_to_indexer_id(slug): + """A utility function to translate a shows slug to it's indexer and indexer id. + + :param slug: the slug used for the indexer and indexer id. + :return: A tuple with the indexer id and show id, for the specific indexer. 
+ """ + if not slug: + return None, None + result = re.compile(r'([a-z]+)([0-9]+)').match(slug) + if result: + return indexer_name_to_id(result.group(1)), int(result.group(2)) + + +def get_trakt_indexer(indexer): + """Get trakt indexer name using given indexer number.""" + for trakt_indexer in TRAKT_INDEXERS: + if TRAKT_INDEXERS[trakt_indexer] == indexer: + return trakt_indexer + return None diff --git a/medusa/indexers/indexer_exceptions.py b/medusa/indexers/indexer_exceptions.py index 2a93430e8c..4116990bc4 100644 --- a/medusa/indexers/indexer_exceptions.py +++ b/medusa/indexers/indexer_exceptions.py @@ -70,3 +70,7 @@ class IndexerUnavailable(IndexerError): class IndexerShowAllreadyInLibrary(IndexerException): """The show is already in the library. Same show for multiple indexers, is not supported.""" + + +class IndexerAuthFailed(IndexerException): + """Indexer authentication exception.""" diff --git a/medusa/indexers/tmdb/tmdb.py b/medusa/indexers/tmdb/tmdb.py index 6c5ca23c45..a615faa0ac 100644 --- a/medusa/indexers/tmdb/tmdb.py +++ b/medusa/indexers/tmdb/tmdb.py @@ -35,8 +35,8 @@ class Tmdb(BaseIndexer): """Create easy-to-use interface to name of season/episode name. - t = tmdb() - t['Scrubs'][1][24]['episodename'] + indexer_api = tmdb() + indexer_api['Scrubs'][1][24]['episodename'] u'My Last Day' """ @@ -261,7 +261,7 @@ def _get_episodes(self, tmdb_id, specials=False, aired_season=None): # pylint: for cur_ep in episodes: if self.config['dvdorder']: logger.debug('Using DVD ordering.') - use_dvd = cur_ep['dvd_season'] is not None and cur_ep['dvd_episodenumber'] is not None + use_dvd = cur_ep.get('dvd_season') is not None and cur_ep.get('dvd_episodenumber') is not None else: use_dvd = False @@ -269,6 +269,11 @@ def _get_episodes(self, tmdb_id, specials=False, aired_season=None): # pylint: seasnum, epno = cur_ep.get('dvd_season'), cur_ep.get('dvd_episodenumber') else: seasnum, epno = cur_ep.get('seasonnumber'), cur_ep.get('episodenumber') + if self.config['dvdorder']: + logger.warning("Episode doesn't have DVD order available (season: %s, episode: %s). " + 'Falling back to non-DVD order. ' + 'Please consider disabling DVD order for the show with TMDB ID: %s', + seasnum, epno, tmdb_id) if seasnum is None or epno is None: logger.warning('An episode has incomplete season/episode number (season: %r, episode: %r)', seasnum, epno) @@ -295,8 +300,8 @@ def _parse_images(self, sid): http://theTMDB.com/api/[APIKEY]/series/[SERIES ID]/banners.xml images are retrieved using t['show name]['_banners'], for example: - >>> t = TMDB(images = True) - >>> t['scrubs']['_banners'].keys() + >>> indexer_api = TMDB(images = True) + >>> indexer_api['scrubs']['_banners'].keys() ['fanart', 'poster', 'series', 'season'] >>> t['scrubs']['_banners']['poster']['680x1000']['35308']['_bannerpath'] u'http://theTMDB.com/banners/posters/76156-2.jpg' @@ -391,8 +396,8 @@ def _parse_actors(self, sid): From http://theTMDB.com/api/[APIKEY]/series/[SERIES ID]/actors.xml Actors are retrieved using t['show name]['_actors'], for example: - >>> t = TMDB(actors = True) - >>> actors = t['scrubs']['_actors'] + >>> indexer_api = TMDB(actors = True) + >>> actors = indexer_api['scrubs']['_actors'] >>> type(actors) >>> type(actors[0]) @@ -589,7 +594,7 @@ def get_last_updated_seasons(self, show_list, from_time, weeks=1): return show_season_updates def get_id_by_external(self, **kwargs): - """Search tvmaze for a show, using an external id. + """Search tmdb for a show, using an external id. 
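# Sketch of the new slug helpers in indexer_config.py: a show slug is the
# indexer identifier glued to the show id (e.g. 'tvdb1234'), and the reverse
# lookup splits it with the same regex used above. The identifier map is a
# stand-in for the real indexerConfig identifiers.
import re

IDENTIFIERS = {1: 'tvdb', 3: 'tvmaze', 4: 'tmdb'}  # illustrative subset


def to_slug(indexer, indexer_id):
    return '{name}{indexer_id}'.format(name=IDENTIFIERS[indexer], indexer_id=indexer_id)


def from_slug(slug):
    match = re.compile(r'([a-z]+)([0-9]+)').match(slug)
    if match:
        name_to_id = {v: k for k, v in IDENTIFIERS.items()}
        return name_to_id.get(match.group(1)), int(match.group(2))
    return None, None


assert to_slug(1, 1234) == 'tvdb1234'
assert from_slug('tvdb1234') == (1, 1234)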
Accepts as kwargs, so you'l need to add the externals as key/values. :param tvrage_id: The tvrage id. @@ -605,9 +610,9 @@ def get_id_by_external(self, **kwargs): externals = self.tmdb.TV(result['tv_results'][0]['id']).external_ids() externals['tmdb_id'] = result['tv_results'][0]['id'] - externals = {external_id: external_value - for external_id, external_value + externals = {tmdb_external_id: external_value + for tmdb_external_id, external_value in externals.items() - if external_value and external_id in ['tvrage_id', 'imdb_id', 'tvdb_id']} + if external_value and tmdb_external_id in ['tvrage_id', 'imdb_id', 'tvdb_id']} return externals return {} diff --git a/medusa/indexers/tvdbv2/tvdbv2_api.py b/medusa/indexers/tvdbv2/tvdbv2_api.py index 35c7134aaa..46e1deea68 100644 --- a/medusa/indexers/tvdbv2/tvdbv2_api.py +++ b/medusa/indexers/tvdbv2/tvdbv2_api.py @@ -20,25 +20,24 @@ from collections import OrderedDict from requests.compat import urljoin -from requests.packages.urllib3.exceptions import MaxRetryError, RequestError +from requests.exceptions import RequestException -from tvdbapiv2 import (ApiClient, AuthenticationApi, SearchApi, SeriesApi, UpdatesApi) -from tvdbapiv2.rest import ApiException +from tvdbapiv2 import (ApiClient, SearchApi, SeriesApi, UpdatesApi) +from tvdbapiv2.exceptions import ApiException from ..indexer_base import (Actor, Actors, BaseIndexer) -from ..indexer_exceptions import (IndexerError, IndexerException, IndexerShowIncomplete, IndexerShowNotFound, - IndexerShowNotFoundInLanguage, IndexerUnavailable) +from ..indexer_exceptions import (IndexerAuthFailed, IndexerError, IndexerException, IndexerShowIncomplete, + IndexerShowNotFound, IndexerShowNotFoundInLanguage, IndexerUnavailable) from ..indexer_ui import BaseUI, ConsoleUI - logger = logging.getLogger(__name__) class TVDBv2(BaseIndexer): """Create easy-to-use interface to name of season/episode name. - >>> t = tvdbv2() - >>> t['Scrubs'][1][24]['episodename'] + >>> indexer_api = tvdbv2() + >>> indexer_api['Scrubs'][1][24]['episodename'] u'My Last Day' """ @@ -57,27 +56,17 @@ def __init__(self, *args, **kwargs): # pylint: disable=too-many-locals,too-many # Initiate the tvdb api v2 api_base_url = 'https://api.thetvdb.com' + # Set the session. + self.session = self.config['session'] + # client_id = 'username' # (optional! Only required for the /user routes) # client_secret = 'pass' # (optional! 
Only required for the /user routes) apikey = '0629B785CE550C8D' + tvdb_client = ApiClient(api_base_url, session=self.session, api_key=apikey) - authentication_string = {'apikey': apikey, 'username': '', 'userpass': ''} - - try: - unauthenticated_client = ApiClient(api_base_url) - auth_api = AuthenticationApi(unauthenticated_client) - access_token = auth_api.login_post(authentication_string) - auth_client = ApiClient(api_base_url, 'Authorization', 'Bearer ' + access_token.token) - except ApiException as e: - logger.warning("could not authenticate to the indexer TheTvdb.com, with reason '%s',%s)", e.reason, e.status) - raise IndexerUnavailable("Indexer unavailable with reason '%s' (%s)" % (e.reason, e.status)) - except (MaxRetryError, RequestError) as e: - logger.warning("could not authenticate to the indexer TheTvdb.com, with reason '%s'.", e.reason) - raise IndexerUnavailable("Indexer unavailable with reason '%s'" % e.reason) - - self.search_api = SearchApi(auth_client) - self.series_api = SeriesApi(auth_client) - self.updates_api = UpdatesApi(auth_client) + self.search_api = SearchApi(tvdb_client) + self.series_api = SeriesApi(tvdb_client) + self.updates_api = UpdatesApi(tvdb_client) # An api to indexer series/episode object mapping self.series_map = { @@ -153,10 +142,15 @@ def _show_search(self, show, request_language='en'): try: results = self.search_api.search_series_get(name=show, accept_language=request_language) except ApiException as e: + if e.status == 401: + raise IndexerAuthFailed( + 'Authentication failed, possible bad api key. reason: {reason} ({status})' + .format(reason=e.reason, status=e.status) + ) raise IndexerShowNotFound( - 'Show search failed in getting a result with reason: %s (%s)' % (e.reason, e.status) + 'Show search failed in getting a result with reason: %s' % e.reason ) - except (MaxRetryError, RequestError) as e: + except RequestException as e: raise IndexerException('Show search failed in getting a result with error: %r' % e) if results: @@ -194,9 +188,23 @@ def _get_show_by_id(self, tvdbv2_id, request_language='en'): # pylint: disable= :param tvdbv2_id: The shows tvdbv2 id :return: An ordered dict with the show searched for. """ + results = None if tvdbv2_id: logger.debug('Getting all show data for %s', [tvdbv2_id]) - results = self.series_api.series_id_get(tvdbv2_id, accept_language=request_language) + try: + results = self.series_api.series_id_get(tvdbv2_id, accept_language=request_language) + except ApiException as e: + if e.status == 401: + raise IndexerAuthFailed( + 'Authentication failed, possible bad api key. reason: {reason} ({status})' + .format(reason=e.reason, status=e.status) + ) + raise IndexerShowNotFound( + 'Show search failed in getting a result with reason: {reason} ({status})' + .format(reason=e.reason, status=e.status) + ) + except RequestException as e: + raise IndexerException('Show search failed in getting a result with error: %r' % e) if not results: return @@ -259,12 +267,17 @@ def _download_episodes(self, tvdb_id, specials=False, aired_season=None): page += 1 except ApiException as e: logger.debug('Error trying to index the episodes') + if e.status == 401: + raise IndexerAuthFailed( + 'Authentication failed, possible bad api key. reason: {reason} ({status})' + .format(reason=e.reason, status=e.status) + ) raise IndexerShowIncomplete( 'Show episode search exception, ' 'could not get any episodes. Did a {search_type} search. 
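# Sketch of the error-translation pattern introduced above for the TheTVDB v2
# client: an ApiException carrying HTTP status 401 now surfaces as an
# authentication failure instead of a generic "show not found". The exception
# classes and ApiError below are simplified stand-ins, not the real
# tvdbapiv2/medusa classes.
class IndexerAuthFailed(Exception):
    pass


class IndexerShowNotFound(Exception):
    pass


class ApiError(Exception):
    def __init__(self, status, reason):
        super(ApiError, self).__init__(reason)
        self.status = status
        self.reason = reason


def translate(api_error):
    if api_error.status == 401:
        raise IndexerAuthFailed('Authentication failed, possible bad api key.'
                                ' reason: {0} ({1})'.format(api_error.reason, api_error.status))
    raise IndexerShowNotFound('Show search failed: {0}'.format(api_error.reason))


try:
    translate(ApiError(401, 'Not Authorized'))
except IndexerAuthFailed as error:
    print(error)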
Exception: {ex}'.format (search_type='full' if not aired_season else 'season {season}'.format(season=aired_season), ex=e) ) - except (MaxRetryError, RequestError) as e: + except RequestException as e: raise IndexerUnavailable('Error connecting to Tvdb api. Caused by: {0!r}'.format(e)) if not results: @@ -290,17 +303,26 @@ def _parse_episodes(self, tvdb_id, episode_data): for cur_ep in episodes: if self.config['dvdorder']: logger.debug('Using DVD ordering.') - use_dvd = cur_ep['dvd_season'] is not None and cur_ep['dvd_episodenumber'] is not None + use_dvd = cur_ep.get('dvd_season') is not None and cur_ep.get('dvd_episodenumber') is not None else: use_dvd = False if use_dvd: seasnum, epno = cur_ep.get('dvd_season'), cur_ep.get('dvd_episodenumber') + if self.config['dvdorder']: + logger.warning("Episode doesn't have DVD order available (season: %s, episode: %s). " + 'Falling back to non-DVD order. ' + 'Please consider disabling DVD order for the show with TMDB ID: %s', + seasnum, epno, tvdb_id) else: seasnum, epno = cur_ep.get('seasonnumber'), cur_ep.get('episodenumber') if seasnum is None or epno is None: - logger.warning('An episode has incomplete season/episode number (season: %r, episode: %r)', seasnum, epno) + logger.warning('This episode has incomplete information. The season or episode number ' + '(season: %s, episode: %s) is missing. ' + 'to get rid of this warning, you will have to contact tvdb through their forums ' + 'and have them fix the specific episode.', + seasnum, epno) continue # Skip to next episode # float() is because https://github.com/dbr/tvnamer/issues/95 - should probably be fixed in TVDB data @@ -356,8 +378,8 @@ def _parse_images(self, sid): From http://thetvdb.com/api/[APIKEY]/series/[SERIES ID]/banners.xml images are retrieved using t['show name]['_banners'], for example: - >>> t = Tvdb(images = True) - >>> t['scrubs']['_banners'].keys() + >>> indexer_api = Tvdb(images = True) + >>> indexer_api['scrubs']['_banners'].keys() ['fanart', 'poster', 'series', 'season', 'seasonwide'] For a Poster >>> t['scrubs']['_banners']['poster']['680x1000']['35308']['_bannerpath'] @@ -384,9 +406,9 @@ def _parse_images(self, sid): # Let's get the different types of images available for this series try: series_images_count = self.series_api.series_id_images_get(sid, accept_language=self.config['language']) - except Exception as e: - logger.debug('Could not get image count for showid: %s, with exception: %r', sid, e) - return False + except (ApiException, RequestException) as e: + logger.info('Could not get image count for showid: %s with reason: %r', sid, e.message) + return for image_type, image_count in self._object_to_dict(series_images_count).iteritems(): try: @@ -436,10 +458,9 @@ def _parse_images(self, sid): v = self.config['artwork_prefix'] % v base_path[k] = v - - except Exception as e: + except (ApiException, RequestException) as e: logger.warning('Could not parse Poster for showid: %s, with exception: %r', sid, e) - return False + return self._save_images(sid, _images) self._set_show_data(sid, '_banners', _images) @@ -450,8 +471,8 @@ def _parse_actors(self, sid): From http://thetvdb.com/api/[APIKEY]/series/[SERIES ID]/actors.xml Actors are retrieved using t['show name]['_actors'], for example: - >>> t = Tvdb(actors = True) - >>> actors = t['scrubs']['_actors'] + >>> indexer_api = Tvdb(actors = True) + >>> actors = indexer_api['scrubs']['_actors'] >>> type(actors) >>> type(actors[0]) @@ -558,12 +579,20 @@ def get_last_updated_series(self, from_time, weeks=1, 
filter_show_list=None): try: while updates and count < weeks: updates = self.updates_api.updated_query_get(from_time).data - last_update_ts = max(x.last_updated for x in updates) - from_time = last_update_ts - total_updates += [int(_.id) for _ in updates] + if updates is not None: + last_update_ts = max(x.last_updated for x in updates) + from_time = last_update_ts + total_updates += [int(_.id) for _ in updates] count += 1 - except (ApiException, MaxRetryError, RequestError) as e: - raise IndexerUnavailable('Error connecting to Tvdb api. Caused by: {0!r}'.format(e)) + except ApiException as e: + if e.status == 401: + raise IndexerAuthFailed( + 'Authentication failed, possible bad api key. reason: {reason} ({status})' + .format(reason=e.reason, status=e.status) + ) + raise IndexerUnavailable('Error connecting to Tvdb api. Caused by: {0}'.format(e.message)) + except RequestException as e: + raise IndexerUnavailable('Error connecting to Tvdb api. Caused by: {0}'.format(e.message)) if total_updates and filter_show_list: new_list = [] @@ -592,6 +621,14 @@ def get_last_updated_seasons(self, show_list, from_time, weeks=1): episodes = self._download_episodes(show_id) for episode in episodes['episode']: + if episode.get('seasonnumber') is None or episode.get('episodenumber') is None: + logger.warning('This episode has incomplete information. The season or episode number ' + '(season: %s, episode: %s) is missing. ' + 'to get rid of this warning, you will have to contact tvdb through their forums ' + 'and have them fix the specific episode.', + episode.get('seasonnumber'), episode.get('episodenumber')) + continue + if int(episode['lastupdated']) > from_time: total_updates.append(int(episode['seasonnumber'])) diff --git a/medusa/indexers/tvmaze/tvmaze_api.py b/medusa/indexers/tvmaze/tvmaze_api.py index ca24dc88f0..2fcdadefb9 100644 --- a/medusa/indexers/tvmaze/tvmaze_api.py +++ b/medusa/indexers/tvmaze/tvmaze_api.py @@ -34,8 +34,8 @@ class TVmaze(BaseIndexer): """Create easy-to-use interface to name of season/episode name - >>> t = tvmaze() - >>> t['Scrubs'][1][24]['episodename'] + >>> indexer_api = tvmaze() + >>> indexer_api['Scrubs'][1][24]['episodename'] u'My Last Day' """ @@ -214,7 +214,7 @@ def _get_show_by_id(self, tvmaze_id, request_language='en'): # pylint: disable= """ results = None if tvmaze_id: - logger.debug('Getting all show data for %s', [tvmaze_id]) + logger.debug('Getting all show data for %s', tvmaze_id) results = self.tvmaze_api.get_show(maze_id=tvmaze_id) if not results: @@ -232,7 +232,7 @@ def _get_episodes(self, tvmaze_id, specials=False, aired_season=None): # pylint :return: An ordered dict with the show searched for. 
In the format of OrderedDict{"episode": [list of episodes]} """ # Parse episode data - logger.debug('Getting all episodes of %s', [tvmaze_id]) + logger.debug('Getting all episodes of %s', tvmaze_id) try: results = self.tvmaze_api.episode_list(tvmaze_id, specials=specials) except IDNotFound: @@ -252,7 +252,7 @@ def _get_episodes(self, tvmaze_id, specials=False, aired_season=None): # pylint for cur_ep in episodes: if self.config['dvdorder']: logger.debug('Using DVD ordering.') - use_dvd = cur_ep['dvd_season'] is not None and cur_ep['dvd_episodenumber'] is not None + use_dvd = cur_ep.get('dvd_season') is not None and cur_ep.get('dvd_episodenumber') is not None else: use_dvd = False @@ -260,6 +260,11 @@ def _get_episodes(self, tvmaze_id, specials=False, aired_season=None): # pylint seasnum, epno = cur_ep.get('dvd_season'), cur_ep.get('dvd_episodenumber') else: seasnum, epno = cur_ep.get('seasonnumber'), cur_ep.get('episodenumber') + if self.config['dvdorder']: + logger.warning("Episode doesn't have DVD order available (season: %s, episode: %s). " + 'Falling back to non-DVD order. ' + 'Please consider disabling DVD order for the show with TVmaze ID: %s', + seasnum, epno, tvmaze_id) if seasnum is None or epno is None: logger.warning('An episode has incomplete season/episode number (season: %r, episode: %r)', seasnum, epno) @@ -281,11 +286,11 @@ def _parse_images(self, tvmaze_id): images are retrieved using t['show name]['_banners'], for example: - >>> t = TVMaze(images = True) - >>> t['scrubs']['_banners'].keys() + >>> indexer_api = TVMaze(images = True) + >>> indexer_api['scrubs']['_banners'].keys() ['fanart', 'poster', 'series', 'season'] >>> t['scrubs']['_banners']['poster']['680x1000']['35308']['_bannerpath'] - u'http://theTMDB.com/banners/posters/76156-2.jpg' + u'http://thetvmaze.com/banners/posters/76156-2.jpg' >>> Any key starting with an underscore has been processed (not the raw @@ -293,12 +298,12 @@ def _parse_images(self, tvmaze_id): This interface will be improved in future versions. 
""" - logger.debug('Getting show banners for %s', [tvmaze_id]) + logger.debug('Getting show banners for %s', tvmaze_id) try: image_medium = self.shows[tvmaze_id]['image_medium'] except Exception: - logger.debug('Could not parse Poster for showid: %s', [tvmaze_id]) + logger.debug('Could not parse Poster for showid: %s', tvmaze_id) return False # Set the poster (using the original uploaded poster for now, as the medium formated is 210x195 @@ -322,7 +327,7 @@ def _parse_season_images(self, tvmaze_id): """Parse Show and Season posters.""" seasons = {} if tvmaze_id: - logger.debug('Getting all show data for %s', [tvmaze_id]) + logger.debug('Getting all show data for %s', tvmaze_id) try: seasons = self.tvmaze_api.show_seasons(maze_id=tvmaze_id) except BaseError as e: @@ -342,16 +347,16 @@ def _parse_season_images(self, tvmaze_id): def _parse_actors(self, tvmaze_id): """Parsers actors XML, from - http://theTMDB.com/api/[APIKEY]/series/[SERIES ID]/actors.xml + http://thetvmaze.com/api/[APIKEY]/series/[SERIES ID]/actors.xml Actors are retrieved using t['show name]['_actors'], for example: - >>> t = TVMaze(actors = True) - >>> actors = t['scrubs']['_actors'] + >>> indexer_api = TVMaze(actors = True) + >>> actors = indexer_api['scrubs']['_actors'] >>> type(actors) - + >>> type(actors[0]) - + >>> actors[0] >>> sorted(actors[0].keys()) @@ -359,12 +364,12 @@ def _parse_actors(self, tvmaze_id): >>> actors[0]['name'] u'Zach Braff' >>> actors[0]['image'] - u'http://theTMDB.com/banners/actors/43640.jpg' + u'http://thetvmaze.com/banners/actors/43640.jpg' Any key starting with an underscore has been processed (not the raw data from the indexer) """ - logger.debug('Getting actors for %s', [tvmaze_id]) + logger.debug('Getting actors for %s', tvmaze_id) try: actors = self.tvmaze_api.show_cast(tvmaze_id) except CastNotFound: @@ -386,7 +391,7 @@ def _parse_actors(self, tvmaze_id): self._set_show_data(tvmaze_id, '_actors', cur_actors) def _get_show_data(self, tvmaze_id, language='en'): # pylint: disable=too-many-branches,too-many-statements,too-many-locals - """Takes a series ID, gets the epInfo URL and parses the TheTMDB json response + """Takes a series ID, gets the epInfo URL and parses the tvmaze json response into the shows dict in layout: shows[series_id][season_number][episode_number] """ @@ -489,18 +494,17 @@ def get_id_by_external(self, **kwargs): :returns: A dict with externals, including the tvmaze id. """ mapping = {'thetvdb': 'tvdb_id', 'tvrage': 'tvrage_id', 'imdb': 'imdb_id'} - externals = {} for external_id in ['tvdb_id', 'imdb_id', 'tvrage_id']: if kwargs.get(external_id): try: result = self.tvmaze_api.get_show(**{external_id: kwargs.get(external_id)}) if result: - externals = result.externals - externals[external_id] = result.id - return {mapping[external_id]: external_value - for external_id, external_value - in externals.items() - if external_value and mapping.get(external_id)} + externals = {mapping[tvmaze_external_id]: external_value + for tvmaze_external_id, external_value + in result.externals.items() + if external_value and mapping.get(tvmaze_external_id)} + externals['tvmaze_id'] = result.maze_id + return externals except ShowNotFound: logger.debug('Could not get tvmaze externals using external key %s and id %s', external_id, kwargs.get(external_id)) @@ -508,4 +512,4 @@ def get_id_by_external(self, **kwargs): except BaseError as e: logger.warning('Could not get tvmaze externals. 
Cause: %s', e) continue - return externals + return {} diff --git a/medusa/init/logconfig.py b/medusa/init/logconfig.py index 81e0983dc5..489b07dda9 100644 --- a/medusa/init/logconfig.py +++ b/medusa/init/logconfig.py @@ -51,7 +51,9 @@ def process(self, msg, kwargs): :param kwargs: :return: """ - return BraceMessage(msg, (), kwargs), {k: kwargs[k] for k in self.reserved_keywords if k in kwargs} + reserved = {k: kwargs[k] for k in self.reserved_keywords if k in kwargs} + kwargs = {k: kwargs[k] for k in kwargs if k not in self.reserved_keywords} + return BraceMessage(msg, (), kwargs), reserved class BraceMessage(object): diff --git a/medusa/issue_submitter.py b/medusa/issue_submitter.py index e52c0dcfc2..26656661a6 100644 --- a/medusa/issue_submitter.py +++ b/medusa/issue_submitter.py @@ -13,7 +13,7 @@ from github.GithubException import GithubException, RateLimitExceededException from . import app, db from .classes import ErrorViewer -from .github_client import authenticate, get_github_repo +from .github_client import authenticate, get_github_repo, token_authenticate logger = logging.getLogger(__name__) @@ -148,10 +148,14 @@ def submit_github_issue(self, version_checker, max_issues=500): :return: user message and issue number :rtype: list of tuple(str, str) """ - if not app.DEBUG or not app.GIT_USERNAME or not app.GIT_PASSWORD: - logger.warning(IssueSubmitter.INVALID_CONFIG) - return [(IssueSubmitter.INVALID_CONFIG, None)] - + if app.GIT_AUTH_TYPE == 0: + if not app.DEBUG or not app.GIT_USERNAME or not app.GIT_PASSWORD: + logger.warning(IssueSubmitter.INVALID_CONFIG) + return [(IssueSubmitter.INVALID_CONFIG, None)] + else: + if not app.DEBUG or not app.GIT_TOKEN: + logger.warning(IssueSubmitter.INVALID_CONFIG) + return [(IssueSubmitter.INVALID_CONFIG, None)] if not ErrorViewer.errors: logger.info(IssueSubmitter.NO_ISSUES) return [(IssueSubmitter.NO_ISSUES, None)] @@ -166,7 +170,10 @@ def submit_github_issue(self, version_checker, max_issues=500): self.running = True try: - github = authenticate(app.GIT_USERNAME, app.GIT_PASSWORD) + if app.GIT_AUTH_TYPE == 0: + github = authenticate(app.GIT_USERNAME, app.GIT_PASSWORD) + else: + github = token_authenticate(app.GIT_TOKEN) if not github: return [(IssueSubmitter.BAD_CREDENTIALS, None)] diff --git a/medusa/logger.py b/medusa/logger/__init__.py similarity index 97% rename from medusa/logger.py rename to medusa/logger/__init__.py index 15401364f0..f9fc309314 100644 --- a/medusa/logger.py +++ b/medusa/logger/__init__.py @@ -29,24 +29,27 @@ import sys from collections import OrderedDict -from logging import NullHandler +from logging import ( + CRITICAL, + DEBUG, + ERROR, + INFO, + NullHandler, + WARNING, +) from logging.handlers import RotatingFileHandler import knowit + +from medusa import app +from medusa.init.logconfig import standard_logger + from requests.compat import quote from six import itervalues, text_type import subliminal from tornado.log import access_log, app_log, gen_log import traktor -from . 
import app -from .init.logconfig import standard_logger - # log levels -CRITICAL = logging.CRITICAL -ERROR = logging.ERROR -WARNING = logging.WARNING -INFO = logging.INFO -DEBUG = logging.DEBUG DB = 5 LOGGING_LEVELS = { @@ -103,7 +106,7 @@ def get_loggers(package): return [standard_logger(modname) for modname in list_modules(package)] -def read_loglines(log_file=None, modification_time=None, max_lines=None, max_traceback_depth=100, +def read_loglines(log_file=None, modification_time=None, start_index=0, max_lines=None, max_traceback_depth=100, predicate=lambda logline: True, formatter=lambda logline: logline): """A generator that returns the lines of all consolidated log files in descending order. @@ -111,6 +114,7 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra :type log_file: str or unicode :param modification_time: :type modification_time: datetime.datetime + :param start_index: :param max_lines: :type max_lines: int :param max_traceback_depth: @@ -129,6 +133,7 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra for f in log_files: if not f or not os.path.isfile(f): continue + if modification_time: log_mtime = os.path.getmtime(f) if log_mtime and datetime.datetime.fromtimestamp(log_mtime) < modification_time: @@ -147,7 +152,8 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra del traceback_lines[:] if predicate(logline): counter += 1 - yield formatter(logline) + if counter >= start_index: + yield formatter(logline) if max_lines is not None and counter >= max_lines: return @@ -157,7 +163,8 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra del traceback_lines[:] if predicate(logline): counter += 1 - yield formatter(logline) + if counter >= start_index: + yield formatter(logline) if max_lines is not None and counter >= max_lines: return else: @@ -167,7 +174,9 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra message = traceback_lines[-1] logline = LogLine(message, message=message, traceback_lines=list(reversed(traceback_lines[:-1]))) if predicate(logline): - yield formatter(logline) + counter += 1 + if counter >= start_index: + yield formatter(logline) def reverse_readlines(filename, buf_size=2097152, encoding=default_encoding): @@ -514,7 +523,7 @@ def format(self, record): :return: :rtype: str """ - from . 
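# Sketch of the paging behaviour the new start_index parameter gives
# read_loglines() above: matching lines are counted, yielding only starts once
# the counter reaches start_index, and iteration stops after max_lines matches.
# The data below is illustrative.
def page_lines(lines, start_index=0, max_lines=None, predicate=lambda line: True):
    counter = 0
    for line in lines:
        if predicate(line):
            counter += 1
            if counter >= start_index:
                yield line
            if max_lines is not None and counter >= max_lines:
                return


log_lines = ['line {0}'.format(i) for i in range(1, 11)]
print(list(page_lines(log_lines, start_index=4, max_lines=6)))
# -> ['line 4', 'line 5', 'line 6']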
import classes, common + from medusa import classes, common privacy_level = common.privacy_levels[app.PRIVACY_LEVEL] if not privacy_level: msg = super(CensoredFormatter, self).format(record) @@ -569,7 +578,7 @@ def init_logging(self, console_logging): :type console_logging: bool """ import medusa - from .helper.common import dateTimeFormat + from medusa.helper.common import dateTimeFormat self.loggers.extend(get_loggers(medusa)) self.loggers.extend(get_loggers(subliminal)) self.loggers.extend([access_log, app_log, gen_log]) @@ -603,7 +612,7 @@ def init_logging(self, console_logging): def reconfigure_file_handler(self): """Reconfigure rotating file handler.""" - from .helper.common import dateTimeFormat + from medusa.helper.common import dateTimeFormat target_file = os.path.join(app.LOG_DIR, app.LOG_FILENAME) target_size = int(app.LOG_SIZE * 1024 * 1024) target_number = int(app.LOG_NR) diff --git a/medusa/logger/adapters/__init__.py b/medusa/logger/adapters/__init__.py new file mode 100644 index 0000000000..9457835b67 --- /dev/null +++ b/medusa/logger/adapters/__init__.py @@ -0,0 +1,3 @@ +# coding=utf-8 + +"""Custom LoggingAdapters for Python logging.""" diff --git a/medusa/logger/adapters/style.py b/medusa/logger/adapters/style.py new file mode 100644 index 0000000000..5fec3e13c0 --- /dev/null +++ b/medusa/logger/adapters/style.py @@ -0,0 +1,71 @@ +# coding=utf-8 + +"""Style Adapters for Python logging.""" + +import collections +import functools +import logging + +from six import text_type + + +class BraceMessage(object): + """Lazily convert a Brace-formatted message.""" + + def __init__(self, msg, *args, **kwargs): + """Initialize a lazy-formatted message.""" + self.msg = msg + self.args = args + self.kwargs = kwargs + + def __str__(self): + """Convert to string.""" + args = self.args + kwargs = self.kwargs + if args and len(args) == 1: + if args[0] and isinstance(args[0], collections.Mapping): + args = [] + kwargs = self.args[0] + + try: + return self.msg.format(*args, **kwargs) + except IndexError: + return self.msg.format(kwargs) + + def __repr__(self): + """Convert to class representation.""" + sep = ', ' + kw_repr = '{key}={value!r}' + name = self.__class__.__name__ + args = sep.join(map(text_type, self.args)) + kwargs = sep.join(kw_repr.format(key=k, value=v) + for k, v in self.kwargs.items()) + return '{cls}({args})'.format( + cls=name, + args=sep.join([repr(self.msg), args, kwargs]) + ) + + +class BraceAdapter(logging.LoggerAdapter): + """Adapt logger to use Brace-formatted messages.""" + + def __init__(self, logger, extra=None): + """Initialize the Brace adapter with a logger.""" + super(BraceAdapter, self).__init__(logger, extra) + self.debug = functools.partial(self.log, logging.DEBUG) + self.info = functools.partial(self.log, logging.INFO) + self.warning = functools.partial(self.log, logging.WARNING) + self.error = functools.partial(self.log, logging.ERROR) + self.critical = functools.partial(self.log, logging.CRITICAL) + + def log(self, level, msg, *args, **kwargs): + """Log a message at the specified level using Brace-formatting.""" + if self.isEnabledFor(level): + msg, kwargs = self.process(msg, kwargs) + brace_msg = BraceMessage(msg, *args, **kwargs) + self.logger.log(level, brace_msg, **kwargs) + + def exception(self, msg, *args, **kwargs): + """Add exception information before delegating to self.log.""" + kwargs["exc_info"] = 1 + self.log(logging.ERROR, msg, *args, **kwargs) diff --git a/medusa/metadata/__init__.py b/medusa/metadata/__init__.py index 
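# Usage sketch for the new BraceAdapter above: a module wraps its stdlib
# logger once and then passes brace-style templates plus a dict (or positional
# arguments) instead of pre-formatting the message, so formatting only happens
# when a record is actually emitted. Assumes the medusa package is importable;
# the messages and values are illustrative.
import logging

from medusa.logger.adapters.style import BraceAdapter

logging.basicConfig(level=logging.INFO)
log = BraceAdapter(logging.getLogger(__name__))

log.info(u'Unable to get size for file {name} Error: {msg!r}',
         {'name': '/tmp/example.mkv', 'msg': OSError('gone')})
log.warning(u'Not enough free space. Needed: {0} bytes, found: {1} bytes',
            1024, 512)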
a169ecf898..670039a7a2 100644 --- a/medusa/metadata/__init__.py +++ b/medusa/metadata/__init__.py @@ -19,9 +19,29 @@ import sys -from ..metadata import generic, helpers, kodi, kodi_12plus, mede8er, media_browser, ps3, tivo, wdtv +from medusa.metadata import ( + generic, + helpers, + kodi, + kodi_12plus, + mede8er, + media_browser, + ps3, + tivo, + wdtv, +) -__all__ = ['generic', 'helpers', 'kodi', 'kodi_12plus', 'media_browser', 'ps3', 'wdtv', 'tivo', 'mede8er'] +__all__ = [ + 'generic', + 'helpers', + 'kodi', + 'kodi_12plus', + 'mede8er', + 'media_browser', + 'ps3', + 'tivo', + 'wdtv', +] def available_generators(): diff --git a/medusa/metadata/generic.py b/medusa/metadata/generic.py index 134443a35a..68071914f7 100644 --- a/medusa/metadata/generic.py +++ b/medusa/metadata/generic.py @@ -1,46 +1,33 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - import io +import logging import os import re +from medusa import app, exception_handler, helpers +from medusa.helper.common import replace_extension +from medusa.helper.exceptions import ex +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_config import INDEXER_TMDB, INDEXER_TVDBV2, INDEXER_TVMAZE +from medusa.indexers.indexer_exceptions import IndexerException, IndexerShowNotFound +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import helpers as metadata_helpers + from requests.exceptions import RequestException from six import iterkeys import tmdbsimple as tmdb -from .. 
import app, exception_handler, helpers, logger -from ..helper.common import replace_extension -from ..helper.exceptions import ex -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_config import INDEXER_TMDB, INDEXER_TVDBV2, INDEXER_TVMAZE -from ..indexers.indexer_exceptions import IndexerException, IndexerShowNotFound -from ..metadata import helpers as metadata_helpers -from ..show_name_helpers import allPossibleShowNames try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree - # todo: Implement Fanart.tv v3 API +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class GenericMetadata(object): """ @@ -63,18 +50,14 @@ def __init__(self, show_metadata=False, episode_metadata=False, fanart=False, season_posters=False, season_banners=False, season_all_poster=False, season_all_banner=False): - self.name = u"Generic" - - self._ep_nfo_extension = u"nfo" - self._show_metadata_filename = u"tvshow.nfo" - - self.fanart_name = u"fanart.jpg" - self.poster_name = u"poster.jpg" - self.banner_name = u"banner.jpg" - - self.season_all_poster_name = u"season-all-poster.jpg" - self.season_all_banner_name = u"season-all-banner.jpg" - + self.name = u'Generic' + self._ep_nfo_extension = u'nfo' + self._show_metadata_filename = u'tvshow.nfo' + self.fanart_name = u'fanart.jpg' + self.poster_name = u'poster.jpg' + self.banner_name = u'banner.jpg' + self.season_all_poster_name = u'season-all-poster.jpg' + self.season_all_banner_name = u'season-all-banner.jpg' self.show_metadata = show_metadata self.episode_metadata = episode_metadata self.fanart = fanart @@ -100,12 +83,12 @@ def get_id(self): @staticmethod def makeID(name): - name_id = re.sub(r"[+]", "plus", name) - name_id = re.sub(r"[^\w\d_]", "_", name_id).lower() + name_id = re.sub(r'[+]', 'plus', name) + name_id = re.sub(r'[^\w\d_]', '_', name_id).lower() return name_id def set_config(self, string): - config_list = [bool(int(x)) for x in string.split('|')] + config_list = [bool(int(x)) for x in string.split(u'|')] self.show_metadata = config_list[0] self.episode_metadata = config_list[1] self.fanart = config_list[2] @@ -121,7 +104,8 @@ def set_config(self, string): def _check_exists(location): if location: result = os.path.isfile(location) - logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) + log.debug(u'Checking if {location} exists: {result}', + {u'location': location, u'result': result}) return result return False @@ -178,12 +162,12 @@ def get_episode_thumb_path(ep_obj): """ if os.path.isfile(ep_obj.location): - tbn_filename = ep_obj.location.rpartition(".") + tbn_filename = ep_obj.location.rpartition('.') - if tbn_filename[0] == "": - tbn_filename = ep_obj.location + "-thumb.jpg" + if tbn_filename[0] == '': + tbn_filename = ep_obj.location + '-thumb.jpg' else: - tbn_filename = tbn_filename[0] + "-thumb.jpg" + tbn_filename = tbn_filename[0] + '-thumb.jpg' else: return None @@ -201,11 +185,11 @@ def get_season_poster_path(show_obj, season): # Our specials thumbnail is, well, special if season == 0: - season_poster_filename = 'season-specials' + season_poster_filename = u'season-specials' else: - season_poster_filename = 'season' + str(season).zfill(2) + season_poster_filename = u'season' + str(season).zfill(2) - return os.path.join(show_obj.location, season_poster_filename + '-poster.jpg') + return os.path.join(show_obj.location, season_poster_filename + u'-poster.jpg') @staticmethod def 
get_season_banner_path(show_obj, season): @@ -219,11 +203,11 @@ def get_season_banner_path(show_obj, season): # Our specials thumbnail is, well, special if season == 0: - season_banner_filename = 'season-specials' + season_banner_filename = u'season-specials' else: - season_banner_filename = 'season' + str(season).zfill(2) + season_banner_filename = u'season' + str(season).zfill(2) - return os.path.join(show_obj.location, season_banner_filename + '-banner.jpg') + return os.path.join(show_obj.location, season_banner_filename + u'-banner.jpg') def get_season_all_poster_path(self, show_obj): return os.path.join(show_obj.location, self.season_all_poster_name) @@ -249,75 +233,94 @@ def _ep_data(self, ep_obj): def create_show_metadata(self, show_obj): if self.show_metadata and show_obj and not self._has_show_metadata(show_obj): - logger.log(u"Metadata provider {metadata_provider} creating show metadata for {show_name}" - .format(metadata_provider=self.name, show_name=show_obj.name), logger.DEBUG) + log.debug( + u'Metadata provider {name} creating series metadata for {series}', + {u'name': self.name, u'series': show_obj.name} + ) return self.write_show_file(show_obj) return False def create_episode_metadata(self, ep_obj): if self.episode_metadata and ep_obj and not self.has_episode_metadata(ep_obj): - logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.pretty_name(), - logger.DEBUG) + log.debug( + u'Metadata provider {name} creating episode metadata for {episode}', + {u'name': self.name, u'episode': ep_obj.pretty_name()} + ) return self.write_ep_file(ep_obj) return False def update_show_indexer_metadata(self, show_obj): if self.show_metadata and show_obj and self._has_show_metadata(show_obj): - logger.log( - u"Metadata provider " + self.name + " updating show indexer info metadata file for " + show_obj.name, - logger.DEBUG) + log.debug( + u'Metadata provider {name} updating series indexer info metadata file for {series}', + {u'name': self.name, u'series': show_obj.name} + ) nfo_file_path = self.get_show_file_path(show_obj) try: - with io.open(nfo_file_path, 'rb') as xmlFileObj: + with io.open(nfo_file_path, u'rb') as xmlFileObj: showXML = etree.ElementTree(file=xmlFileObj) - indexerid = showXML.find('id') + indexerid = showXML.find(u'id') root = showXML.getroot() if indexerid is not None: indexerid.text = str(show_obj.indexerid) else: - etree.SubElement(root, "id").text = str(show_obj.indexerid) + etree.SubElement(root, u'id').text = str(show_obj.indexerid) # Make it purdy helpers.indent_xml(root) - showXML.write(nfo_file_path, encoding='UTF-8') + showXML.write(nfo_file_path, encoding=u'UTF-8') helpers.chmod_as_parent(nfo_file_path) return True except etree.ParseError as error: - logger.log('Received an invalid XML for {show}, try again later. Error: {error_msg}'.format - (show=show_obj.name, error_msg=error), logger.WARNING) + log.warning( + u'Received an invalid XML for {series}, try again later. Error: {error}', + {u'series': show_obj.name, u'error': error} + ) except IOError as e: - logger.log( - u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), - logger.ERROR) + log.error( + u'Unable to write file to {location} - are you sure the folder is writeable? 
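# Standalone sketch of the nfo update performed by update_show_indexer_metadata()
# above: parse the existing tvshow.nfo, find (or create) the <id> element,
# overwrite it with the indexer id and write the tree back as UTF-8. The XML
# and the in-memory buffer are stand-ins for the show's real nfo file.
import io
import xml.etree.ElementTree as etree

nfo_xml = b'<tvshow><title>Example Show</title></tvshow>'
show_xml = etree.ElementTree(etree.fromstring(nfo_xml))
root = show_xml.getroot()

indexerid = show_xml.find('id')
if indexerid is not None:
    indexerid.text = str(1234)
else:
    etree.SubElement(root, 'id').text = str(1234)

output = io.BytesIO()  # in Medusa this is the show's tvshow.nfo path
show_xml.write(output, encoding='UTF-8')
print(output.getvalue())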
{error}', + {u'location': nfo_file_path, u'error': ex(e)} + ) def create_fanart(self, show_obj): if self.fanart and show_obj and not self._has_fanart(show_obj): - logger.log(u"Metadata provider " + self.name + " creating fanart for " + show_obj.name, logger.DEBUG) + log.debug( + u'Metadata provider {name} creating fanart for {series}', + {u'name': self.name, u'series': show_obj.name} + ) return self.save_fanart(show_obj) return False def create_poster(self, show_obj): if self.poster and show_obj and not self._has_poster(show_obj): - logger.log(u"Metadata provider " + self.name + " creating poster for " + show_obj.name, logger.DEBUG) + log.debug( + u'Metadata provider {name} creating poster for {series}', + {u'name': self.name, u'series': show_obj.name} + ) return self.save_poster(show_obj) return False def create_banner(self, show_obj): if self.banner and show_obj and not self._has_banner(show_obj): - logger.log(u"Metadata provider " + self.name + " creating banner for " + show_obj.name, logger.DEBUG) + log.debug( + u'Metadata provider {name} creating banner for {series}', + {u'name': self.name, u'series': show_obj.name} + ) return self.save_banner(show_obj) return False def create_episode_thumb(self, ep_obj): if self.episode_thumbnails and ep_obj and not self.has_episode_thumb(ep_obj): - logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.pretty_name(), - logger.DEBUG) + log.debug( + u'Metadata provider {name} creating episode thumbnail for {episode}', + {u'name': self.name, u'episode': ep_obj.pretty_name()} + ) return self.save_thumbnail(ep_obj) return False @@ -326,8 +329,10 @@ def create_season_posters(self, show_obj): result = [] for season in iterkeys(show_obj.episodes): if not self._has_season_poster(show_obj, season): - logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name, - logger.DEBUG) + log.debug( + u'Metadata provider {name} creating season posters for {series}', + {u'name': self.name, u'series': show_obj.name} + ) result.append(self.save_season_posters(show_obj, season)) return all(result) return False @@ -335,8 +340,10 @@ def create_season_posters(self, show_obj): def create_season_banners(self, show_obj): if self.season_banners and show_obj: result = [] - logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, - logger.DEBUG) + log.debug( + u'Metadata provider {name} creating season banners for {series}', + {u'name': self.name, u'series': show_obj.name} + ) for season in iterkeys(show_obj.episodes): # @UnusedVariable if not self._has_season_banner(show_obj, season): result += [self.save_season_banners(show_obj, season)] @@ -345,15 +352,19 @@ def create_season_banners(self, show_obj): def create_season_all_poster(self, show_obj): if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj): - logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name, - logger.DEBUG) + log.debug( + u'Metadata provider {name} creating season all poster for {series}', + {u'name': self.name, u'series': show_obj.name} + ) return self.save_season_all_poster(show_obj) return False def create_season_all_banner(self, show_obj): if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj): - logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name, - logger.DEBUG) + log.debug( + u'Metadata provider {name} creating season all banner for {series}', + 
{u'name': self.name, u'series': show_obj.name} + ) return self.save_season_all_banner(show_obj) return False @@ -407,14 +418,16 @@ def write_show_file(self, show_obj): try: if not os.path.isdir(nfo_file_dir): - logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG) + log.debug(u'Metadata directory missing, creating it at {location}', + {u'location': nfo_file_dir}) os.makedirs(nfo_file_dir) helpers.chmod_as_parent(nfo_file_dir) - logger.log(u"Writing show nfo file to " + nfo_file_path, logger.DEBUG) + log.debug(u'Writing show nfo file to {location}', + {u'location': nfo_file_path}) - nfo_file = io.open(nfo_file_path, 'wb') - data.write(nfo_file, encoding='UTF-8') + nfo_file = io.open(nfo_file_path, u'wb') + data.write(nfo_file, encoding=u'UTF-8') nfo_file.close() helpers.chmod_as_parent(nfo_file_path) except IOError as e: @@ -450,13 +463,15 @@ def write_ep_file(self, ep_obj): try: if not os.path.isdir(nfo_file_dir): - logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG) + log.debug(u'Metadata directory missing, creating it at {location}', + {u'location': nfo_file_dir}) os.makedirs(nfo_file_dir) helpers.chmod_as_parent(nfo_file_dir) - logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG) - nfo_file = io.open(nfo_file_path, 'wb') - data.write(nfo_file, encoding='UTF-8') + log.debug(u'Writing episode nfo file to {location}', + {u'location': nfo_file_path}) + nfo_file = io.open(nfo_file_path, u'wb') + data.write(nfo_file, encoding=u'UTF-8') nfo_file.close() helpers.chmod_as_parent(nfo_file_path) except IOError as e: @@ -477,14 +492,14 @@ def save_thumbnail(self, ep_obj): file_path = self.get_episode_thumb_path(ep_obj) if not file_path: - logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG) + log.debug(u'Unable to find a file path to use for this thumbnail, not generating it') return False thumb_url = self._get_episode_thumb_url(ep_obj) # if we can't find one then give up if not thumb_url: - logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG) + log.debug(u'No thumb is available for this episode, not creating a thumb') return False thumb_data = metadata_helpers.getShowImage(thumb_url) @@ -510,10 +525,10 @@ def save_fanart(self, show_obj, which=None): # use the default fanart name fanart_path = self.get_fanart_path(show_obj) - fanart_data = self._retrieve_show_image('fanart', show_obj, which) + fanart_data = self._retrieve_show_image(u'fanart', show_obj, which) if not fanart_data: - logger.log(u"No fanart image was retrieved, unable to write fanart", logger.DEBUG) + log.debug(u'No fanart image was retrieved, unable to write fanart') return False return self._write_image(fanart_data, fanart_path) @@ -529,10 +544,10 @@ def save_poster(self, show_obj, which=None): # use the default poster name poster_path = self.get_poster_path(show_obj) - poster_data = self._retrieve_show_image('poster', show_obj, which) + poster_data = self._retrieve_show_image(u'poster', show_obj, which) if not poster_data: - logger.log(u"No show poster image was retrieved, unable to write poster", logger.DEBUG) + log.debug(u'No show poster image was retrieved, unable to write poster') return False return self._write_image(poster_data, poster_path) @@ -548,10 +563,10 @@ def save_banner(self, show_obj, which=None): # use the default banner name banner_path = self.get_banner_path(show_obj) - banner_data = self._retrieve_show_image('banner', show_obj, which) + 
banner_data = self._retrieve_show_image(u'banner', show_obj, which) if not banner_data: - logger.log(u"No show banner image was retrieved, unable to write banner", logger.DEBUG) + log.debug(u'No show banner image was retrieved, unable to write banner') return False return self._write_image(banner_data, banner_path) @@ -586,14 +601,16 @@ def save_season_posters(self, show_obj, season): season_poster_file_path = self.get_season_poster_path(show_obj, cur_season) if not season_poster_file_path: - logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", - logger.DEBUG) + log.debug( + u'Path for season {number} came back blank, skipping this season', + {u'number': cur_season} + ) continue seasonData = metadata_helpers.getShowImage(season_url) if not seasonData: - logger.log(u"No season poster data available, skipping this season", logger.DEBUG) + log.debug(u'No season poster data available, skipping this season') continue result += [self._write_image(seasonData, season_poster_file_path)] @@ -633,14 +650,16 @@ def save_season_banners(self, show_obj, season): season_banner_file_path = self.get_season_banner_path(show_obj, cur_season) if not season_banner_file_path: - logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", - logger.DEBUG) + log.debug( + u'Path for season {number} came back blank, skipping this season', + {u'number': cur_season} + ) continue seasonData = metadata_helpers.getShowImage(season_url) if not seasonData: - logger.log(u"No season banner data available, skipping this season", logger.DEBUG) + log.debug(u'No season banner data available, skipping this season') continue result += [self._write_image(seasonData, season_banner_file_path)] @@ -654,10 +673,10 @@ def save_season_all_poster(self, show_obj, which=None): # use the default season all poster name poster_path = self.get_season_all_poster_path(show_obj) - poster_data = self._retrieve_show_image('poster', show_obj, which) + poster_data = self._retrieve_show_image(u'poster', show_obj, which) if not poster_data: - logger.log(u"No show poster image was retrieved, unable to write season all poster", logger.DEBUG) + log.debug(u'No show poster image was retrieved, unable to write season all poster') return False return self._write_image(poster_data, poster_path) @@ -666,10 +685,10 @@ def save_season_all_banner(self, show_obj, which=None): # use the default season all banner name banner_path = self.get_season_all_banner_path(show_obj) - banner_data = self._retrieve_show_image('banner', show_obj, which) + banner_data = self._retrieve_show_image(u'banner', show_obj, which) if not banner_data: - logger.log(u"No show banner image was retrieved, unable to write season all banner", logger.DEBUG) + log.debug(u'No show banner image was retrieved, unable to write season all banner') return False return self._write_image(banner_data, banner_path) @@ -685,22 +704,24 @@ def _write_image(self, image_data, image_path, obj=None): # don't bother overwriting it if os.path.isfile(image_path): - logger.log(u"Image already exists, not downloading", logger.DEBUG) + log.debug(u'Image already exists, not downloading') return False image_dir = os.path.dirname(image_path) if not image_data: - logger.log(u"Unable to retrieve image to save in %s, skipping" % image_path, logger.DEBUG) + log.debug(u'Unable to retrieve image to save in {location}, skipping', + {u'location': image_path}) return False try: if not os.path.isdir(image_dir): - logger.log(u"Metadata dir didn't exist, creating it 
at " + image_dir, logger.DEBUG) + log.debug(u'Metadata directory missing, creating it at {location}', + {u'location': image_path}) os.makedirs(image_dir) helpers.chmod_as_parent(image_dir) - outFile = io.open(image_path, 'wb') + outFile = io.open(image_path, u'wb') outFile.write(image_data) outFile.close() helpers.chmod_as_parent(image_path) @@ -724,20 +745,22 @@ def _retrieve_show_image(self, image_type, show_obj, which=None): indexer_show_obj = self._get_show_data(show_obj) - if image_type not in ('fanart', 'poster', 'banner', 'poster_thumb', 'banner_thumb'): - logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + indexerApi( - show_obj.indexer).name + " object", logger.ERROR) + if image_type not in (u'fanart', u'poster', u'banner', u'poster_thumb', u'banner_thumb'): + log.error( + u'Invalid {image_type}, unable to find it in the {indexer}', + {u'image': image_type, u'indexer': indexerApi(show_obj.indexer).name} + ) return None - if image_type == 'poster_thumb': - if getattr(indexer_show_obj, 'poster', None): - image_url = re.sub('posters', '_cache/posters', indexer_show_obj['poster']) + if image_type == u'poster_thumb': + if getattr(indexer_show_obj, u'poster', None): + image_url = re.sub(u'posters', u'_cache/posters', indexer_show_obj[u'poster']) if not image_url: # Try and get images from TMDB image_url = self._retrieve_show_images_from_tmdb(show_obj, image_type) - elif image_type == 'banner_thumb': - if getattr(indexer_show_obj, 'banner', None): - image_url = re.sub('graphical', '_cache/graphical', indexer_show_obj['banner']) + elif image_type == u'banner_thumb': + if getattr(indexer_show_obj, u'banner', None): + image_url = re.sub(u'graphical', u'_cache/graphical', indexer_show_obj[u'banner']) else: if getattr(indexer_show_obj, image_type, None): image_url = indexer_show_obj[image_type] @@ -765,26 +788,26 @@ def _season_posters_dict(self, show_obj, season): indexer_show_obj = self._get_show_data(show_obj) # if we have no season banners then just finish - if not getattr(indexer_show_obj, '_banners', None): + if not getattr(indexer_show_obj, u'_banners', None): return result - if ('season' not in indexer_show_obj['_banners'] or - 'original' not in indexer_show_obj['_banners']['season'] or - season not in indexer_show_obj['_banners']['season']['original']): + if (u'season' not in indexer_show_obj[u'_banners'] or + u'original' not in indexer_show_obj[u'_banners'][u'season'] or + season not in indexer_show_obj[u'_banners'][u'season'][u'original']): return result # Give us just the normal poster-style season graphics - season_art_obj = indexer_show_obj['_banners']['season'] + season_art_obj = indexer_show_obj[u'_banners'][u'season'] # Returns a nested dictionary of season art with the season # number as primary key. It's really overkill but gives the option # to present to user via ui to pick down the road. 
# find the correct season in the TVDB object and just copy the dict into our result dict - for season_art_id in season_art_obj['original'][season].keys(): + for season_art_id in season_art_obj[u'original'][season].keys(): if season not in result: result[season] = {} - result[season][season_art_id] = season_art_obj['original'][season][season_art_id]['_bannerpath'] + result[season][season_art_id] = season_art_obj[u'original'][season][season_art_id][u'_bannerpath'] return result @@ -802,31 +825,32 @@ def _season_banners_dict(self, show_obj, season): indexer_show_obj = self._get_show_data(show_obj) # if we have no seasonwide banners then just finish - if not getattr(indexer_show_obj, '_banners', None): + if not getattr(indexer_show_obj, u'_banners', None): return result - if ('seasonwide' not in indexer_show_obj['_banners'] or - 'original' not in indexer_show_obj['_banners']['season'] or - season not in indexer_show_obj['_banners']['seasonwide']['original']): + if (u'seasonwide' not in indexer_show_obj[u'_banners'] or + u'original' not in indexer_show_obj[u'_banners'][u'seasonwide'] or + season not in indexer_show_obj[u'_banners'][u'seasonwide'][u'original']): return result # Give us just the normal poster-style season graphics - season_art_obj = indexer_show_obj['_banners']['seasonwide'] + season_art_obj = indexer_show_obj[u'_banners'][u'seasonwide'] # Returns a nested dictionary of season art with the season # number as primary key. It's really overkill but gives the option # to present to user via ui to pick down the road. # find the correct season in the TVDB object and just copy the dict into our result dict - for season_art_id in season_art_obj['original'][season].keys(): + for season_art_id in season_art_obj[u'original'][season].keys(): if season not in result: result[season] = {} - result[season][season_art_id] = season_art_obj['original'][season][season_art_id]['_bannerpath'] + result[season][season_art_id] = season_art_obj[u'original'][season][season_art_id][u'_bannerpath'] return result def _get_show_data(self, show_obj): - """Retrieve show data from the indexer. + """ + Retrieve show data from the indexer. Try to reuse the indexer_api class instance attribute. As we are reusing the indexers results, we need to do a full index including actors and images. 
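Note on the logging convention used in the added lines of these hunks: the message carries str.format-style placeholders and the single positional argument is a dict of values, so formatting is deferred until a record is actually emitted. The sketch below is a minimal stand-in written for this note, not the real medusa.logger.adapters.style.BraceAdapter, and the file path in the usage example is purely illustrative.

    import logging


    class BraceAdapter(object):
        """Minimal brace-style logging wrapper (illustrative sketch only)."""

        def __init__(self, logger):
            self.logger = logger

        def _log(self, level, msg, *args):
            # Skip formatting entirely when the level is not enabled.
            if self.logger.isEnabledFor(level):
                if len(args) == 1 and isinstance(args[0], dict):
                    msg = msg.format(**args[0])  # named placeholders, e.g. {location}
                elif args:
                    msg = msg.format(*args)      # positional placeholders, e.g. {0}
                self.logger.log(level, msg)

        def debug(self, msg, *args):
            self._log(logging.DEBUG, msg, *args)

        def info(self, msg, *args):
            self._log(logging.INFO, msg, *args)

        def warning(self, msg, *args):
            self._log(logging.WARNING, msg, *args)

        def error(self, msg, *args):
            self._log(logging.ERROR, msg, *args)


    log = BraceAdapter(logging.getLogger(__name__))
    # A NullHandler keeps library modules silent unless the application configures logging.
    log.logger.addHandler(logging.NullHandler())

    # Usage mirroring the calls in this patch (the path is an example value):
    log.debug(u'Checking if {location} exists: {result}',
              {u'location': u'/tmp/tvshow.nfo', u'result': False})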
@@ -838,28 +862,32 @@ def _get_show_data(self, show_obj): show_id = show_obj.indexerid try: - if not (show_obj.indexer_api and all([show_obj.indexer_api.config['banners_enabled'], - show_obj.indexer_api.config['actors_enabled']])): + if not (show_obj.indexer_api and all([show_obj.indexer_api.config[u'banners_enabled'], + show_obj.indexer_api.config[u'actors_enabled']])): show_obj.create_indexer(banners=True, actors=True) self.indexer_api = show_obj.indexer_api my_show = self.indexer_api[int(show_id)] except IndexerShowNotFound: - logger.log(u'Unable to find {indexer} show {id}, skipping it'.format - (indexer=indexerApi(show_obj.indexer).name, - id=show_id), logger.WARNING) + log.warning( + u'Unable to find {indexer} show {id}, skipping it', + {u'indexer': indexerApi(show_obj.indexer).name, u'id': show_id} + ) return False except (IndexerException, RequestException): - logger.log(u"{indexer} is down, can't use its data to add this show".format - (indexer=indexerApi(show_obj.indexer).name), logger.WARNING) + log.warning( + u'{indexer} is down, cannot use its data to add this show', + {u'indexer': indexerApi(show_obj.indexer).name} + ) return False # check for title and id - if not (getattr(my_show, 'seriesname', None) and getattr(my_show, 'id', None)): - logger.log(u'Incomplete info for {indexer} show {id}, skipping it'.format - (indexer=indexerApi(show_obj.indexer).name, - id=show_id), logger.WARNING) + if not (getattr(my_show, u'seriesname', None) and getattr(my_show, u'id', None)): + log.warning( + u'Incomplete info for {indexer} show {id}, skipping it', + {u'indexer': indexerApi(show_obj.indexer).name, u'id': show_id} + ) return False return my_show @@ -874,92 +902,105 @@ def retrieveShowMetadata(self, folder): metadata_path = os.path.join(folder, self._show_metadata_filename) if not os.path.isdir(folder) or not os.path.isfile(metadata_path): - logger.log(u"Can't load the metadata file from " + metadata_path + ", it doesn't exist", logger.DEBUG) + log.debug( + u'Cannot load the metadata file from {location}, it does not exist', + {u'location': metadata_path} + ) return empty_return - logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG) + log.debug(u'Loading show info from metadata file in {location}', + {u'location': folder}) try: - with io.open(metadata_path, 'rb') as xmlFileObj: + with io.open(metadata_path, u'rb') as xmlFileObj: showXML = etree.ElementTree(file=xmlFileObj) - if (showXML.findtext('title') is None or - (showXML.findtext('tvdbid') is None and showXML.findtext('id') is None)): - logger.log(u"Invalid info in tvshow.nfo (missing name or id): %s %s %s" - % (showXML.findtext('title'), showXML.findtext('tvdbid'), showXML.findtext('id')), - logger.DEBUG) + if (showXML.findtext(u'title') is None or + (showXML.findtext(u'tvdbid') is None and showXML.findtext(u'id') is None)): + log.debug( + u'Invalid info in tvshow.nfo (missing name or id): {0} {1} {2}', + showXML.findtext(u'title'), showXML.findtext(u'tvdbid'), showXML.findtext(u'id'), + ) return empty_return - name = showXML.findtext('title') + name = showXML.findtext(u'title') - if showXML.findtext('tvdbid'): - indexer_id = int(showXML.findtext('tvdbid')) - elif showXML.findtext('id'): - indexer_id = int(showXML.findtext('id')) + if showXML.findtext(u'tvdbid'): + indexer_id = int(showXML.findtext(u'tvdbid')) + elif showXML.findtext(u'id'): + indexer_id = int(showXML.findtext(u'id')) else: - logger.log(u"Empty or field in NFO, unable to find a ID", logger.WARNING) + log.warning(u'Empty or field in NFO, 
unable to find a ID') return empty_return if indexer_id is None: - logger.log(u"Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file", logger.WARNING) + log.warning(u'Invalid Indexer ID ({0}), not using metadata file', + indexer_id) return empty_return indexer = None - if showXML.findtext('episodeguide/url'): - epg_url = showXML.findtext('episodeguide/url').lower() + if showXML.findtext(u'episodeguide/url'): + epg_url = showXML.findtext(u'episodeguide/url').lower() if str(indexer_id) in epg_url: - if 'thetvdb.com' in epg_url: + if u'thetvdb.com' in epg_url: indexer = INDEXER_TVDBV2 - elif 'tvmaze.com' in epg_url: + elif u'tvmaze.com' in epg_url: indexer = INDEXER_TVMAZE - elif 'themoviedb.org' in epg_url: + elif u'themoviedb.org' in epg_url: indexer = INDEXER_TMDB - elif 'tvrage' in epg_url: - logger.log(u"Invalid Indexer ID (" + str( - indexer_id) + "), not using metadata file because it has TVRage info", logger.WARNING) + elif u'tvrage' in epg_url: + log.warning( + u'Invalid Indexer ID ({0}), not using metadata file because it has TVRage info', + indexer_id + ) return empty_return - except Exception as e: - logger.log( - u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), - logger.WARNING) + except Exception as error: + log.warning( + u'There was an error parsing your existing metadata file: {location} error: {error}', + {u'location': metadata_path, u'error': ex(error)} + ) return empty_return return indexer_id, name, indexer @staticmethod def _retrieve_show_images_from_tmdb(show, img_type): - types = {'poster': 'poster_path', - 'banner': None, - 'fanart': 'backdrop_path', - 'poster_thumb': 'poster_path', - 'banner_thumb': None} + types = {u'poster': u'poster_path', + u'banner': None, + u'fanart': u'backdrop_path', + u'poster_thumb': u'poster_path', + u'banner_thumb': None} # get TMDB configuration info tmdb.API_KEY = app.TMDB_API_KEY config = tmdb.Configuration() try: response = config.info() - except RequestException as e: - logger.log('Indexer TMDB is unavailable at this time. 
Cause: {cause}'.format(cause=e), logger.WARNING) + except RequestException as error: + log.warning(u'Indexer TMDB is unavailable at this time: {reason}', + {u'reason': error}) return False - base_url = response['images']['base_url'] - sizes = response['images']['poster_sizes'] + base_url = response[u'images'][u'base_url'] + sizes = response[u'images'][u'poster_sizes'] def size_str_to_int(x): - return float("inf") if x == 'original' else int(x[1:]) + return float(u'inf') if x == u'original' else int(x[1:]) max_size = max(sizes, key=size_str_to_int) try: search = tmdb.Search() - for show_name in allPossibleShowNames(show): - for result in search.collection(query=show_name)['results'] + search.tv(query=show_name)['results']: + for show_name in show.get_all_possible_names(): + for result in search.collection(query=show_name)[u'results'] + search.tv(query=show_name)[u'results']: if types[img_type] and getattr(result, types[img_type]): - return "{0}{1}{2}".format(base_url, max_size, result[types[img_type]]) + return u'{0}{1}{2}'.format(base_url, max_size, result[types[img_type]]) except Exception: pass - logger.log(u"Could not find any " + img_type + " images on TMDB for " + show.name, logger.INFO) + log.info( + u'Could not find any {type} images on TMDB for {series}', + {u'type': img_type, u'series': show.name} + ) diff --git a/medusa/metadata/helpers.py b/medusa/metadata/helpers.py index 3bb8c502a2..eae2e1f4c8 100644 --- a/medusa/metadata/helpers.py +++ b/medusa/metadata/helpers.py @@ -1,23 +1,13 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - -from .. import helpers, logger +import logging + +from medusa import helpers +from medusa.logger.adapters.style import BraceAdapter + + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) meta_session = helpers.make_session() @@ -28,15 +18,15 @@ def getShowImage(url, imgNum=None): # if they provided a fanart number try to use it instead if imgNum is not None: - tempURL = url.split('-')[0] + "-" + str(imgNum) + ".jpg" + tempURL = url.split('-')[0] + '-' + str(imgNum) + '.jpg' else: tempURL = url - logger.log(u"Fetching image from " + tempURL, logger.DEBUG) + log.debug(u'Fetching image from {url}', {'url': tempURL}) image_data = helpers.get_url(tempURL, session=meta_session, returns='content') if image_data is None: - logger.log(u"There was an error trying to retrieve the image, aborting", logger.WARNING) + log.warning(u'There was an error trying to retrieve the image, aborting') return return image_data diff --git a/medusa/metadata/kodi.py b/medusa/metadata/kodi.py index 0925000919..28780ab1b3 100644 --- a/medusa/metadata/kodi.py +++ b/medusa/metadata/kodi.py @@ -1,26 +1,10 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. 
-# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . import os -from ..helper.common import replace_extension -from ..metadata import kodi_12plus +from medusa.helper.common import replace_extension +from medusa.metadata import kodi_12plus class KODIMetadata(kodi_12plus.KODI_12PlusMetadata): diff --git a/medusa/metadata/kodi_12plus.py b/medusa/metadata/kodi_12plus.py index 24c447c884..d79b421782 100644 --- a/medusa/metadata/kodi_12plus.py +++ b/medusa/metadata/kodi_12plus.py @@ -1,37 +1,28 @@ # coding=utf-8 -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - import datetime +import logging import re from babelfish import Country + +from medusa import helpers +from medusa.helper.common import dateFormat, episode_num +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import generic + from six import string_types -from . import generic -from .. import helpers, logger -from ..helper.common import dateFormat, episode_num -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class KODI_12PlusMetadata(generic.GenericMetadata): """ @@ -233,21 +224,24 @@ def _ep_data(self, ep_obj): try: my_ep = my_show[ep_to_write.season][ep_to_write.episode] except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'Unable to find episode {ep_num} on {indexer}... ' - u'has it been removed? Should I delete from db?'.format - (ep_num=episode_num(ep_to_write.season, ep_to_write.episode), - indexer=indexerApi(ep_obj.show.indexer).name)) + log.info( + u'Unable to find episode {ep_num} on {indexer}...' + u' has it been removed? 
Should I delete from db?', { + 'ep_num': episode_num(ep_to_write.season, ep_to_write.episode), + 'indexer': indexerApi(ep_obj.show.indexer).name, + } + ) return None if not getattr(my_ep, 'firstaired', None): my_ep['firstaired'] = str(datetime.date.fromordinal(1)) if not getattr(my_ep, 'episodename', None): - logger.log(u'Not generating nfo because the ep has no title', logger.DEBUG) + log.debug(u'Not generating nfo because the ep has no title') return None - logger.log(u'Creating metadata for episode {ep_num}'.format - (ep_num=episode_num(ep_obj.season, ep_obj.episode)), logger.DEBUG) + log.debug(u'Creating metadata for episode {0}', + episode_num(ep_obj.season, ep_obj.episode)) if len(eps_to_write) > 1: episode = etree.SubElement(root_node, 'episodedetails') diff --git a/medusa/metadata/mede8er.py b/medusa/metadata/mede8er.py index 66d8d4641d..195e93663a 100644 --- a/medusa/metadata/mede8er.py +++ b/medusa/metadata/mede8er.py @@ -1,39 +1,30 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +from __future__ import unicode_literals import datetime import io +import logging import os +from medusa import helpers +from medusa.helper.common import dateFormat, episode_num, replace_extension +from medusa.helper.exceptions import ex +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import media_browser + from six import string_types -from .. import helpers, logger -from ..helper.common import dateFormat, episode_num, replace_extension -from ..helper.exceptions import ex -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound -from ..metadata import media_browser try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class Mede8erMetadata(media_browser.MediaBrowserMetadata): """ @@ -208,10 +199,13 @@ def _ep_data(self, ep_obj): try: my_ep = my_show[ep_to_write.season][ep_to_write.episode] except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'Unable to find episode {ep_num} on {indexer}... ' - u'has it been removed? Should I delete from db?'.format - (ep_num=episode_num(ep_to_write.season, ep_to_write.episode), - indexer=indexerApi(ep_obj.show.indexer).name)) + log.info( + 'Unable to find episode {ep_num} on {indexer}...' + ' has it been removed? 
Should I delete from db?', { + 'ep_num': episode_num(ep_to_write.season, ep_to_write.episode), + 'indexer': indexerApi(ep_obj.show.indexer).name, + } + ) return None if ep_to_write == ep_obj: @@ -295,13 +289,13 @@ def _ep_data(self, ep_obj): if not episode_name.text: episode_name.text = ep_to_write.name else: - episode_name.text = u', '.join([episode_name.text, ep_to_write.name]) + episode_name.text = ', '.join([episode_name.text, ep_to_write.name]) if ep_to_write.description: if not overview.text: overview.text = ep_to_write.description else: - overview.text = u'\r'.join([overview.text, ep_to_write.description]) + overview.text = '\r'.join([overview.text, ep_to_write.description]) # Make it purdy helpers.indent_xml(root_node) @@ -335,24 +329,28 @@ def write_show_file(self, show_obj): try: if not os.path.isdir(nfo_file_dir): - logger.log(u'Metadata directory did not exist, creating it at {path}'.format - (path=nfo_file_dir), logger.DEBUG) + log.debug( + 'Metadata directory did not exist, creating it at {location}', + {'location': nfo_file_dir} + ) os.makedirs(nfo_file_dir) helpers.chmod_as_parent(nfo_file_dir) - logger.log(u'Writing show nfo file to {path}'.format - (path=nfo_file_path), logger.DEBUG) + log.debug( + 'Writing show nfo file to {location}', + {'location': nfo_file_path} + ) nfo_file = io.open(nfo_file_path, 'wb') data.write(nfo_file, encoding='utf-8', xml_declaration=True) nfo_file.close() helpers.chmod_as_parent(nfo_file_path) - except IOError as e: - logger.log(u'Unable to write file to {path} - ' - u'are you sure the folder is writable? {exception}'.format - (path=nfo_file_path, exception=ex(e)), - logger.ERROR) + except IOError as error: + log.error( + 'Unable to write file to {location} - are you sure the folder is writable? {error}', + {'location': nfo_file_path, 'error': ex(error)} + ) return False return True @@ -384,13 +382,13 @@ def write_ep_file(self, ep_obj): try: if not os.path.isdir(nfo_file_dir): - logger.log(u'Metadata directory did not exist, creating it at {path}'.format - (path=nfo_file_dir), logger.DEBUG) + log.debug('Metadata directory did not exist, creating it at {location}', + {'location': nfo_file_dir}) os.makedirs(nfo_file_dir) helpers.chmod_as_parent(nfo_file_dir) - logger.log(u'Writing episode nfo file to {path}'.format - (path=nfo_file_path), logger.DEBUG) + log.debug('Writing episode nfo file to {location}', + {'location': nfo_file_path}) with io.open(nfo_file_path, 'wb') as nfo_file: # Calling encode directly, b/c often descriptions have wonky characters. @@ -399,9 +397,8 @@ def write_ep_file(self, ep_obj): helpers.chmod_as_parent(nfo_file_path) except IOError as e: - logger.log(u'Unable to write file to {path} - ' - u'are you sure the folder is writable? {exception}'.format - (path=nfo_file_path, exception=ex(e)), logger.ERROR) + log.error('Unable to write file to {location} - are you sure the folder is writable? {error}', + {'location': nfo_file_path, 'error': ex(e)}) return False return True diff --git a/medusa/metadata/media_browser.py b/medusa/metadata/media_browser.py index 2c300d9dda..0f040492be 100644 --- a/medusa/metadata/media_browser.py +++ b/medusa/metadata/media_browser.py @@ -1,37 +1,27 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . import datetime +import logging import os import re +from medusa import app, helpers +from medusa.helper.common import dateFormat, episode_num, replace_extension +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import generic + from six import iteritems, string_types -from .. import app, helpers, logger -from ..helper.common import dateFormat, episode_num, replace_extension -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound -from ..metadata import generic try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class MediaBrowserMetadata(generic.GenericMetadata): """ @@ -72,25 +62,23 @@ def __init__(self, season_all_poster, season_all_banner) - self.name = 'MediaBrowser' - - self._ep_nfo_extension = 'xml' - self._show_metadata_filename = 'series.xml' - - self.fanart_name = 'backdrop.jpg' - self.poster_name = 'folder.jpg' + self.name = u'MediaBrowser' + self._ep_nfo_extension = u'xml' + self._show_metadata_filename = u'series.xml' + self.fanart_name = u'backdrop.jpg' + self.poster_name = u'folder.jpg' # web-ui metadata template - self.eg_show_metadata = 'series.xml' - self.eg_episode_metadata = 'Season##\\metadata\\filename.xml' - self.eg_fanart = 'backdrop.jpg' - self.eg_poster = 'folder.jpg' - self.eg_banner = 'banner.jpg' - self.eg_episode_thumbnails = 'Season##\\metadata\\filename.jpg' - self.eg_season_posters = 'Season##\\folder.jpg' - self.eg_season_banners = 'Season##\\banner.jpg' - self.eg_season_all_poster = 'not supported' - self.eg_season_all_banner = 'not supported' + self.eg_show_metadata = u'series.xml' + self.eg_episode_metadata = u'Season##\\metadata\\filename.xml' + self.eg_fanart = u'backdrop.jpg' + self.eg_poster = u'folder.jpg' + self.eg_banner = u'banner.jpg' + self.eg_episode_thumbnails = u'Season##\\metadata\\filename.jpg' + self.eg_season_posters = u'Season##\\folder.jpg' + self.eg_season_banners = u'Season##\\banner.jpg' + self.eg_season_all_poster = u'not supported' + self.eg_season_all_banner = u'not supported' # Override with empty methods for unsupported features def retrieveShowMetadata(self, folder): @@ -113,11 +101,11 @@ def get_episode_file_path(self, ep_obj): if os.path.isfile(ep_obj.location): xml_file_name = replace_extension(os.path.basename(ep_obj.location), self._ep_nfo_extension) - metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata') + metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), u'metadata') xml_file_path = os.path.join(metadata_dir_name, xml_file_name) else: - logger.log(u'Episode location doesn\'t exist: {path}'.format - (path=ep_obj.location), logger.DEBUG) + log.debug(u'Episode location missing: {path}', + {u'path': ep_obj.location}) return '' return xml_file_path @@ -132,8 +120,8 @@ def get_episode_thumb_path(ep_obj): """ if os.path.isfile(ep_obj.location): - 
tbn_file_name = replace_extension(os.path.basename(ep_obj.location), 'jpg') - metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), 'metadata') + tbn_file_name = replace_extension(os.path.basename(ep_obj.location), u'jpg') + metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), u'metadata') tbn_file_path = os.path.join(metadata_dir_name, tbn_file_name) else: return None @@ -151,7 +139,7 @@ def get_season_poster_path(show_obj, season): dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))] - season_dir_regex = r'^Season\s+(\d+)$' + season_dir_regex = ur'^Season\s+(\d+)$' season_dir = None @@ -159,7 +147,7 @@ def get_season_poster_path(show_obj, season): # MediaBrowser 1.x only supports 'Specials' # MediaBrowser 2.x looks to only support 'Season 0' # MediaBrowser 3.x looks to mimic KODI/Plex support - if season == 0 and cur_dir == 'Specials': + if season == 0 and cur_dir == u'Specials': season_dir = cur_dir break @@ -174,14 +162,13 @@ def get_season_poster_path(show_obj, season): break if not season_dir: - logger.log(u'Unable to find a season directory for season {season_num}'.format - (season_num=season), logger.DEBUG) + log.debug(u'Unable to find a season directory for season {0}', season) return None - logger.log(u'Using {path}/folder.jpg as season dir for season {season_num}'.format - (path=season_dir, season_num=season), logger.DEBUG) + log.debug(u'Using {path}/folder.jpg as season directory for season {number}', + {u'path': season_dir, u'number': season}) - return os.path.join(show_obj.location, season_dir, 'folder.jpg') + return os.path.join(show_obj.location, season_dir, u'folder.jpg') @staticmethod def get_season_banner_path(show_obj, season): @@ -194,7 +181,7 @@ def get_season_banner_path(show_obj, season): dir_list = [x for x in os.listdir(show_obj.location) if os.path.isdir(os.path.join(show_obj.location, x))] - season_dir_regex = r'^Season\s+(\d+)$' + season_dir_regex = ur'^Season\s+(\d+)$' season_dir = None @@ -202,7 +189,7 @@ def get_season_banner_path(show_obj, season): # MediaBrowser 1.x only supports 'Specials' # MediaBrowser 2.x looks to only support 'Season 0' # MediaBrowser 3.x looks to mimic KODI/Plex support - if season == 0 and cur_dir == 'Specials': + if season == 0 and cur_dir == u'Specials': season_dir = cur_dir break @@ -217,14 +204,13 @@ def get_season_banner_path(show_obj, season): break if not season_dir: - logger.log(u'Unable to find a season directory for season {season_num}'.format - (season_num=season), logger.DEBUG) + log.debug(u'Unable to find a season directory for season {0}', season) return None - logger.log(u'Using {path}/banner.jpg as season dir for season {season_num}'.format - (path=season_dir, season_num=season), logger.DEBUG) + log.debug(u'Using {path}/banner.jpg as season directory for season {number}', + {u'path': season_dir, u'number': season}) - return os.path.join(show_obj.location, season_dir, 'banner.jpg') + return os.path.join(show_obj.location, season_dir, u'banner.jpg') def _show_data(self, show_obj): """ @@ -240,123 +226,123 @@ def _show_data(self, show_obj): if not my_show: return False - tv_node = etree.Element('Series') + tv_node = etree.Element(u'Series') - if getattr(my_show, 'id', None): - indexerid = etree.SubElement(tv_node, 'id') - indexerid.text = str(my_show['id']) + if getattr(my_show, u'id', None): + indexerid = etree.SubElement(tv_node, u'id') + indexerid.text = str(my_show[u'id']) - if getattr(my_show, 'seriesname', None): - series_name = 
etree.SubElement(tv_node, 'SeriesName') - series_name.text = my_show['seriesname'] + if getattr(my_show, u'seriesname', None): + series_name = etree.SubElement(tv_node, u'SeriesName') + series_name.text = my_show[u'seriesname'] - if getattr(my_show, 'status', None): - status = etree.SubElement(tv_node, 'Status') - status.text = my_show['status'] + if getattr(my_show, u'status', None): + status = etree.SubElement(tv_node, u'Status') + status.text = my_show[u'status'] - if getattr(my_show, 'network', None): - network = etree.SubElement(tv_node, 'Network') - network.text = my_show['network'] + if getattr(my_show, u'network', None): + network = etree.SubElement(tv_node, u'Network') + network.text = my_show[u'network'] - if getattr(my_show, 'airs_time', None): - airs_time = etree.SubElement(tv_node, 'Airs_Time') - airs_time.text = my_show['airs_time'] + if getattr(my_show, u'airs_time', None): + airs_time = etree.SubElement(tv_node, u'Airs_Time') + airs_time.text = my_show[u'airs_time'] - if getattr(my_show, 'airs_dayofweek', None): - airs_day_of_week = etree.SubElement(tv_node, 'Airs_DayOfWeek') - airs_day_of_week.text = my_show['airs_dayofweek'] + if getattr(my_show, u'airs_dayofweek', None): + airs_day_of_week = etree.SubElement(tv_node, u'Airs_DayOfWeek') + airs_day_of_week.text = my_show[u'airs_dayofweek'] - first_aired = etree.SubElement(tv_node, 'FirstAired') - if getattr(my_show, 'firstaired', None): - first_aired.text = my_show['firstaired'] + first_aired = etree.SubElement(tv_node, u'FirstAired') + if getattr(my_show, u'firstaired', None): + first_aired.text = my_show[u'firstaired'] - if getattr(my_show, 'contentrating', None): - content_rating = etree.SubElement(tv_node, 'ContentRating') - content_rating.text = my_show['contentrating'] + if getattr(my_show, u'contentrating', None): + content_rating = etree.SubElement(tv_node, u'ContentRating') + content_rating.text = my_show[u'contentrating'] - mpaa = etree.SubElement(tv_node, 'MPAARating') - mpaa.text = my_show['contentrating'] + mpaa = etree.SubElement(tv_node, u'MPAARating') + mpaa.text = my_show[u'contentrating'] - certification = etree.SubElement(tv_node, 'certification') - certification.text = my_show['contentrating'] + certification = etree.SubElement(tv_node, u'certification') + certification.text = my_show[u'contentrating'] - metadata_type = etree.SubElement(tv_node, 'Type') - metadata_type.text = 'Series' + metadata_type = etree.SubElement(tv_node, u'Type') + metadata_type.text = u'Series' - if getattr(my_show, 'overview', None): - overview = etree.SubElement(tv_node, 'Overview') - overview.text = my_show['overview'] + if getattr(my_show, u'overview', None): + overview = etree.SubElement(tv_node, u'Overview') + overview.text = my_show[u'overview'] - if getattr(my_show, 'firstaired', None): - premiere_date = etree.SubElement(tv_node, 'PremiereDate') - premiere_date.text = my_show['firstaired'] + if getattr(my_show, u'firstaired', None): + premiere_date = etree.SubElement(tv_node, u'PremiereDate') + premiere_date.text = my_show[u'firstaired'] - if getattr(my_show, 'rating', None): - rating = etree.SubElement(tv_node, 'Rating') - rating.text = my_show['rating'] + if getattr(my_show, u'rating', None): + rating = etree.SubElement(tv_node, u'Rating') + rating.text = my_show[u'rating'] - if getattr(my_show, 'firstaired', None): + if getattr(my_show, u'firstaired', None): try: - year_text = str(datetime.datetime.strptime(my_show['firstaired'], dateFormat).year) + year_text = str(datetime.datetime.strptime(my_show[u'firstaired'], 
dateFormat).year) if year_text: - production_year = etree.SubElement(tv_node, 'ProductionYear') + production_year = etree.SubElement(tv_node, u'ProductionYear') production_year.text = year_text except Exception: pass - if getattr(my_show, 'runtime', None): - running_time = etree.SubElement(tv_node, 'RunningTime') - running_time.text = my_show['runtime'] + if getattr(my_show, u'runtime', None): + running_time = etree.SubElement(tv_node, u'RunningTime') + running_time.text = my_show[u'runtime'] - runtime = etree.SubElement(tv_node, 'Runtime') - runtime.text = my_show['runtime'] + runtime = etree.SubElement(tv_node, u'Runtime') + runtime.text = my_show[u'runtime'] - if getattr(my_show, 'imdb_id', None): - imdb_id = etree.SubElement(tv_node, 'IMDB_ID') - imdb_id.text = my_show['imdb_id'] + if getattr(my_show, u'imdb_id', None): + imdb_id = etree.SubElement(tv_node, u'IMDB_ID') + imdb_id.text = my_show[u'imdb_id'] - imdb_id = etree.SubElement(tv_node, 'IMDB') - imdb_id.text = my_show['imdb_id'] + imdb_id = etree.SubElement(tv_node, u'IMDB') + imdb_id.text = my_show[u'imdb_id'] - imdb_id = etree.SubElement(tv_node, 'IMDbId') - imdb_id.text = my_show['imdb_id'] + imdb_id = etree.SubElement(tv_node, u'IMDbId') + imdb_id.text = my_show[u'imdb_id'] - if getattr(my_show, 'zap2it_id', None): - zap2it_id = etree.SubElement(tv_node, 'Zap2ItId') - zap2it_id.text = my_show['zap2it_id'] + if getattr(my_show, u'zap2it_id', None): + zap2it_id = etree.SubElement(tv_node, u'Zap2ItId') + zap2it_id.text = my_show[u'zap2it_id'] - if getattr(my_show, 'genre', None) and isinstance(my_show['genre'], string_types): - genres = etree.SubElement(tv_node, 'Genres') - for genre in my_show['genre'].split('|'): + if getattr(my_show, u'genre', None) and isinstance(my_show[u'genre'], string_types): + genres = etree.SubElement(tv_node, u'Genres') + for genre in my_show[u'genre'].split(u'|'): if genre.strip(): - cur_genre = etree.SubElement(genres, 'Genre') + cur_genre = etree.SubElement(genres, u'Genre') cur_genre.text = genre.strip() - genre = etree.SubElement(tv_node, 'Genre') - genre.text = '|'.join([x.strip() for x in my_show['genre'].split('|') if x.strip()]) + genre = etree.SubElement(tv_node, u'Genre') + genre.text = u'|'.join([x.strip() for x in my_show[u'genre'].split(u'|') if x.strip()]) - if getattr(my_show, 'network', None): - studios = etree.SubElement(tv_node, 'Studios') - studio = etree.SubElement(studios, 'Studio') - studio.text = my_show['network'] + if getattr(my_show, u'network', None): + studios = etree.SubElement(tv_node, u'Studios') + studio = etree.SubElement(studios, u'Studio') + studio.text = my_show[u'network'] - if getattr(my_show, '_actors', None): - persons = etree.SubElement(tv_node, 'Persons') - for actor in my_show['_actors']: - if not ('name' in actor and actor['name'].strip()): + if getattr(my_show, u'_actors', None): + persons = etree.SubElement(tv_node, u'Persons') + for actor in my_show[u'_actors']: + if not (u'name' in actor and actor[u'name'].strip()): continue - cur_actor = etree.SubElement(persons, 'Person') + cur_actor = etree.SubElement(persons, u'Person') - cur_actor_name = etree.SubElement(cur_actor, 'Name') - cur_actor_name.text = actor['name'].strip() + cur_actor_name = etree.SubElement(cur_actor, u'Name') + cur_actor_name.text = actor[u'name'].strip() - cur_actor_type = etree.SubElement(cur_actor, 'Type') - cur_actor_type.text = 'Actor' + cur_actor_type = etree.SubElement(cur_actor, u'Type') + cur_actor_type.text = u'Actor' - if 'role' in actor and actor['role'].strip(): - 
cur_actor_role = etree.SubElement(cur_actor, 'Role') - cur_actor_role.text = actor['role'].strip() + if u'role' in actor and actor[u'role'].strip(): + cur_actor_role = etree.SubElement(cur_actor, u'Role') + cur_actor_role.text = actor[u'role'].strip() helpers.indent_xml(tv_node) @@ -375,16 +361,16 @@ def _ep_data(self, ep_obj): eps_to_write = [ep_obj] + ep_obj.related_episodes persons_dict = { - 'Director': [], - 'GuestStar': [], - 'Writer': [] + u'Director': [], + u'GuestStar': [], + u'Writer': [] } my_show = self._get_show_data(ep_obj.show) if not my_show: return None - root_node = etree.Element('Item') + root_node = etree.Element(u'Item') # write an MediaBrowser XML containing info for all matching episodes for ep_to_write in eps_to_write: @@ -392,97 +378,99 @@ def _ep_data(self, ep_obj): try: my_ep = my_show[ep_to_write.season][ep_to_write.episode] except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'Unable to find episode {ep_num} on {indexer}... ' - u'has it been removed? Should I delete from db?'.format - (ep_num=episode_num(ep_to_write.season, ep_to_write.episode), - indexer=indexerApi(ep_obj.show.indexer).name)) + log.info( + u'Unable to find episode {number} on {indexer}... has it been removed? Should I delete from db?', { + u'number': episode_num(ep_to_write.season, ep_to_write.episode), + u'indexer': indexerApi(ep_obj.show.indexer).name + } + ) return None if ep_to_write == ep_obj: # root (or single) episode # default to today's date for specials if firstaired is not set - if ep_to_write.season == 0 and not getattr(my_ep, 'firstaired', None): - my_ep['firstaired'] = str(datetime.date.fromordinal(1)) + if ep_to_write.season == 0 and not getattr(my_ep, u'firstaired', None): + my_ep[u'firstaired'] = str(datetime.date.fromordinal(1)) - if not (getattr(my_ep, 'episodename', None) and getattr(my_ep, 'firstaired', None)): + if not (getattr(my_ep, u'episodename', None) and getattr(my_ep, u'firstaired', None)): return None episode = root_node if ep_to_write.name: - episode_name = etree.SubElement(episode, 'EpisodeName') + episode_name = etree.SubElement(episode, u'EpisodeName') episode_name.text = ep_to_write.name - episode_number = etree.SubElement(episode, 'EpisodeNumber') + episode_number = etree.SubElement(episode, u'EpisodeNumber') episode_number.text = str(ep_obj.episode) if ep_obj.related_episodes: - episode_number_end = etree.SubElement(episode, 'EpisodeNumberEnd') + episode_number_end = etree.SubElement(episode, u'EpisodeNumberEnd') episode_number_end.text = str(ep_to_write.episode) - season_number = etree.SubElement(episode, 'SeasonNumber') + season_number = etree.SubElement(episode, u'SeasonNumber') season_number.text = str(ep_to_write.season) - if not ep_obj.related_episodes and getattr(my_ep, 'absolute_number', None): - absolute_number = etree.SubElement(episode, 'absolute_number') - absolute_number.text = str(my_ep['absolute_number']) + if not ep_obj.related_episodes and getattr(my_ep, u'absolute_number', None): + absolute_number = etree.SubElement(episode, u'absolute_number') + absolute_number.text = str(my_ep[u'absolute_number']) if ep_to_write.airdate != datetime.date.fromordinal(1): - first_aired = etree.SubElement(episode, 'FirstAired') + first_aired = etree.SubElement(episode, u'FirstAired') first_aired.text = str(ep_to_write.airdate) - metadata_type = etree.SubElement(episode, 'Type') - metadata_type.text = 'Episode' + metadata_type = etree.SubElement(episode, u'Type') + metadata_type.text = u'Episode' if ep_to_write.description: - overview = 
etree.SubElement(episode, 'Overview') + overview = etree.SubElement(episode, u'Overview') overview.text = ep_to_write.description if not ep_obj.related_episodes: - if getattr(my_ep, 'rating', None): - rating = etree.SubElement(episode, 'Rating') - rating.text = my_ep['rating'] + if getattr(my_ep, u'rating', None): + rating = etree.SubElement(episode, u'Rating') + rating.text = my_ep[u'rating'] - if getattr(my_show, 'imdb_id', None): - IMDB_ID = etree.SubElement(episode, 'IMDB_ID') - IMDB_ID.text = my_show['imdb_id'] + if getattr(my_show, u'imdb_id', None): + IMDB_ID = etree.SubElement(episode, u'IMDB_ID') + IMDB_ID.text = my_show[u'imdb_id'] - IMDB = etree.SubElement(episode, 'IMDB') - IMDB.text = my_show['imdb_id'] + IMDB = etree.SubElement(episode, u'IMDB') + IMDB.text = my_show[u'imdb_id'] - IMDbId = etree.SubElement(episode, 'IMDbId') - IMDbId.text = my_show['imdb_id'] + IMDbId = etree.SubElement(episode, u'IMDbId') + IMDbId.text = my_show[u'imdb_id'] - indexer_id = etree.SubElement(episode, 'id') + indexer_id = etree.SubElement(episode, u'id') indexer_id.text = str(ep_to_write.indexerid) - persons = etree.SubElement(episode, 'Persons') + persons = etree.SubElement(episode, u'Persons') - if getattr(my_show, '_actors', None): - for actor in my_show['_actors']: - if not ('name' in actor and actor['name'].strip()): + if getattr(my_show, u'_actors', None): + for actor in my_show[u'_actors']: + if not (u'name' in actor and actor[u'name'].strip()): continue - cur_actor = etree.SubElement(persons, 'Person') + cur_actor = etree.SubElement(persons, u'Person') - cur_actor_name = etree.SubElement(cur_actor, 'Name') - cur_actor_name.text = actor['name'].strip() + cur_actor_name = etree.SubElement(cur_actor, u'Name') + cur_actor_name.text = actor[u'name'].strip() - cur_actor_type = etree.SubElement(cur_actor, 'Type') - cur_actor_type.text = 'Actor' + cur_actor_type = etree.SubElement(cur_actor, u'Type') + cur_actor_type.text = u'Actor' - if 'role' in actor and actor['role'].strip(): - cur_actor_role = etree.SubElement(cur_actor, 'Role') - cur_actor_role.text = actor['role'].strip() + if u'role' in actor and actor[u'role'].strip(): + cur_actor_role = etree.SubElement(cur_actor, u'Role') + cur_actor_role.text = actor[u'role'].strip() - language = etree.SubElement(episode, 'Language') + language = etree.SubElement(episode, u'Language') try: - language.text = my_ep['language'] + language.text = my_ep[u'language'] except Exception: language.text = app.INDEXER_DEFAULT_LANGUAGE # tvrage api doesn't provide language so we must assume a value here - thumb = etree.SubElement(episode, 'filename') + thumb = etree.SubElement(episode, u'filename') # TODO: See what this is needed for.. 
if its still needed # just write this to the NFO regardless of whether it actually exists or not # note: renaming files after nfo generation will break this, tough luck @@ -507,22 +495,22 @@ def _ep_data(self, ep_obj): overview.text = u'\r'.join([overview.text, ep_to_write.description]) # collect all directors, guest stars and writers - if getattr(my_ep, 'director', None): - persons_dict['Director'] += [x.strip() for x in my_ep['director'].split('|') if x.strip()] - if getattr(my_ep, 'gueststars', None): - persons_dict['GuestStar'] += [x.strip() for x in my_ep['gueststars'].split('|') if x.strip()] - if getattr(my_ep, 'writer', None): - persons_dict['Writer'] += [x.strip() for x in my_ep['writer'].split('|') if x.strip()] + if getattr(my_ep, u'director', None): + persons_dict[u'Director'] += [x.strip() for x in my_ep[u'director'].split(u'|') if x.strip()] + if getattr(my_ep, u'gueststars', None): + persons_dict[u'GuestStar'] += [x.strip() for x in my_ep[u'gueststars'].split(u'|') if x.strip()] + if getattr(my_ep, u'writer', None): + persons_dict[u'Writer'] += [x.strip() for x in my_ep[u'writer'].split(u'|') if x.strip()] # fill in Persons section with collected directors, guest starts and writers for person_type, names in iteritems(persons_dict): # remove doubles names = list(set(names)) for cur_name in names: - person = etree.SubElement(persons, 'Person') - cur_person_name = etree.SubElement(person, 'Name') + person = etree.SubElement(persons, u'Person') + cur_person_name = etree.SubElement(person, u'Name') cur_person_name.text = cur_name - cur_person_type = etree.SubElement(person, 'Type') + cur_person_type = etree.SubElement(person, u'Type') cur_person_type.text = person_type # Make it purdy @@ -532,5 +520,5 @@ def _ep_data(self, ep_obj): return data -# present a standard 'interface' from the module +# present a standard interface from the module metadata_class = MediaBrowserMetadata diff --git a/medusa/metadata/ps3.py b/medusa/metadata/ps3.py index ecf92d4437..992d03a6ea 100644 --- a/medusa/metadata/ps3.py +++ b/medusa/metadata/ps3.py @@ -1,25 +1,9 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . import os -from ..metadata import generic +from medusa.metadata import generic class PS3Metadata(generic.GenericMetadata): diff --git a/medusa/metadata/tivo.py b/medusa/metadata/tivo.py index 2899563cf4..9ea039a89b 100644 --- a/medusa/metadata/tivo.py +++ b/medusa/metadata/tivo.py @@ -1,33 +1,22 @@ # coding=utf-8 -# Author: Nic Wolfe -# Author: Gordon Turner -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +from __future__ import unicode_literals import datetime import io +import logging import os -from .. import helpers, logger -from ..helper.common import episode_num -from ..helper.exceptions import ex -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound -from ..metadata import generic +from medusa import helpers +from medusa.helper.common import episode_num +from medusa.helper.exceptions import ex +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import generic + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class TIVOMetadata(generic.GenericMetadata): @@ -144,8 +133,8 @@ def get_episode_file_path(self, ep_obj): metadata_dir_name = os.path.join(os.path.dirname(ep_obj.location), '.meta') metadata_file_path = os.path.join(metadata_dir_name, metadata_file_name) else: - logger.log(u'Episode location does not exist: {path}'.format - (path=ep_obj.location), logger.DEBUG) + log.debug(u'Episode location does not exist: {path}', + {'path': ep_obj.location}) return u'' return metadata_file_path @@ -180,10 +169,12 @@ def _ep_data(self, ep_obj): try: my_ep = my_show[ep_to_write.season][ep_to_write.episode] except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'Unable to find episode {ep_num} on {indexer}... ' - u'has it been removed? Should I delete from db?'.format - (ep_num=episode_num(ep_to_write.season, ep_to_write.episode), - indexer=indexerApi(ep_obj.show.indexer).name)) + log.debug( + u'Unable to find episode {number} on {indexer}... has it been removed? Should I delete from db?', { + 'number': episode_num(ep_to_write.season, ep_to_write.episode), + 'indexer': indexerApi(ep_obj.show.indexer).name, + } + ) return None if ep_obj.season == 0 and not getattr(my_ep, 'firstaired', None): @@ -303,13 +294,13 @@ def write_ep_file(self, ep_obj): try: if not os.path.isdir(nfo_file_dir): - logger.log(u'Metadata directory did not exist, creating it at {path}'.format - (path=nfo_file_dir), logger.DEBUG) + log.debug(u'Metadata directory missing, creating it at {location}', + {'location': nfo_file_dir}) os.makedirs(nfo_file_dir) helpers.chmod_as_parent(nfo_file_dir) - logger.log(u'Writing episode nfo file to {path}'.format - (path=nfo_file_path), logger.DEBUG) + log.debug(u'Writing episode nfo file to {location}', + {'location': nfo_file_path}) with io.open(nfo_file_path, 'wb') as nfo_file: # Calling encode directly, b/c often descriptions have wonky characters. @@ -318,9 +309,10 @@ def write_ep_file(self, ep_obj): helpers.chmod_as_parent(nfo_file_path) except EnvironmentError as e: - logger.log(u'Unable to write file to {path} - ' - u'are you sure the folder is writable? {exception}'.format - (path=nfo_file_path, exception=ex(e)), logger.ERROR) + log.error( + u'Unable to write file to {path} - are you sure the folder is writable? 
{error}', + {'path': nfo_file_path, 'error': ex(e)} + ) return False return True diff --git a/medusa/metadata/wdtv.py b/medusa/metadata/wdtv.py index 522b3b1b92..643493fa84 100644 --- a/medusa/metadata/wdtv.py +++ b/medusa/metadata/wdtv.py @@ -1,36 +1,27 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + +from __future__ import unicode_literals import datetime +import logging import os import re -from .. import helpers, logger -from ..helper.common import dateFormat, episode_num as ep_num, replace_extension -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound -from ..metadata import generic +from medusa import helpers +from medusa.helper.common import dateFormat, episode_num as ep_num, replace_extension +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import generic try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class WDTVMetadata(generic.GenericMetadata): """ @@ -70,9 +61,7 @@ def __init__(self, season_all_banner) self.name = 'WDTV' - self._ep_nfo_extension = 'xml' - self.poster_name = 'folder.jpg' # web-ui metadata template @@ -162,12 +151,11 @@ def get_season_poster_path(show_obj, season): break if not season_dir: - logger.log(u'Unable to find a season directory for season {season_num}'.format - (season_num=season), logger.DEBUG) + log.debug('Unable to find a season directory for season {0}', season) return None - logger.log(u'Using {path}/folder.jpg as season dir for season {season_num}'.format - (path=season_dir, season_num=season), logger.DEBUG) + log.debug('Using {location}/folder.jpg as season dir for season {number}', + {'location': season_dir, 'number': season}) return os.path.join(show_obj.location, season_dir, 'folder.jpg') @@ -193,10 +181,12 @@ def _ep_data(self, ep_obj): try: my_ep = my_show[ep_to_write.season][ep_to_write.episode] except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'Unable to find episode {ep_num} on {indexer}... ' - u'has it been removed? Should I delete from db?'.format - (ep_num=ep_num(ep_to_write.season, ep_to_write.episode), - indexer=indexerApi(ep_obj.show.indexer).name)) + log.info( + 'Unable to find episode {number} on {indexer}... has it been removed? 
Should I delete from db?', { + 'number': ep_num(ep_to_write.season, ep_to_write.episode), + 'indexer': indexerApi(ep_obj.show.indexer).name, + } + ) return None if ep_obj.season == 0 and not getattr(my_ep, 'firstaired', None): @@ -282,5 +272,5 @@ def _ep_data(self, ep_obj): return data -# present a standard 'interface' from the module +# present a standard interface from the module metadata_class = WDTVMetadata diff --git a/medusa/name_cache.py b/medusa/name_cache.py index db3118ef41..128201684c 100644 --- a/medusa/name_cache.py +++ b/medusa/name_cache.py @@ -113,7 +113,7 @@ def _cache_name(show): # Add scene exceptions to name cache name_cache.update(names) - logger.log(u'Internal name cache for {show} set to: [{names}]'.format( + logger.log(u'Internal name cache for {show} set to: {names}'.format( show=show.name, names=names.keys() ), logger.DEBUG) diff --git a/medusa/name_parser/__init__.py b/medusa/name_parser/__init__.py index f20e891d18..2dcc4aa5cd 100644 --- a/medusa/name_parser/__init__.py +++ b/medusa/name_parser/__init__.py @@ -1,2 +1,3 @@ # coding=utf-8 + """Name Parser module.""" diff --git a/medusa/name_parser/guessit_parser.py b/medusa/name_parser/guessit_parser.py index 0864853fb7..bea0b7a8fe 100644 --- a/medusa/name_parser/guessit_parser.py +++ b/medusa/name_parser/guessit_parser.py @@ -7,8 +7,9 @@ from time import time from guessit.rules.common.date import valid_year -from .rules import default_api -from .. import app + +from medusa import app +from medusa.name_parser.rules import default_api EXPECTED_TITLES_EXPIRATION_TIME = timedelta(days=1).total_seconds() @@ -16,13 +17,9 @@ # release group exception list expected_groups = [ # release groups with numbers - # https://github.com/guessit-io/guessit/issues/294 'TV2LAX9', - # https://github.com/guessit-io/guessit/issues/352 - 'S4L', - - # https://github.com/guessit-io/guessit/issues/356 + # episode titles in the wrong place 'DHD', ] @@ -30,6 +27,7 @@ 'de', 'en', 'es', + 'ca', 'fr', 'he', 'hu', @@ -42,6 +40,8 @@ 'ru', 'sv', 'uk', + 'mul', # multi language + 'und', # undetermined } allowed_countries = { diff --git a/medusa/name_parser/parser.py b/medusa/name_parser/parser.py index 06730fba36..8bf144522f 100644 --- a/medusa/name_parser/parser.py +++ b/medusa/name_parser/parser.py @@ -1,21 +1,5 @@ # coding=utf-8 -# -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + """Parser module which contains NameParser class.""" from __future__ import unicode_literals @@ -26,13 +10,25 @@ import guessit -from .. 
import common, db, helpers, scene_exceptions, scene_numbering -from ..helper.common import episode_num -from ..indexers.indexer_api import indexerApi -from ..indexers.indexer_exceptions import IndexerEpisodeNotFound, IndexerError, IndexerException +from medusa import ( + common, + db, + helpers, + scene_exceptions, + scene_numbering, +) +from medusa.helper.common import episode_num +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import ( + IndexerEpisodeNotFound, + IndexerError, + IndexerException, +) +from medusa.logger.adapters.style import BraceAdapter -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class NameParser(object): @@ -80,7 +76,7 @@ def _parse_string(self, name): # if we have an air-by-date show and the result is air-by-date, # then get the real season/episode numbers if result.show.air_by_date and result.is_air_by_date: - logger.debug('Show {name} is air by date', name=result.show.name) + log.debug('Series {name} is air by date', {'name': result.show.name}) airdate = result.air_date.toordinal() main_db_con = db.DBConnection() sql_result = main_db_con.select( @@ -93,52 +89,81 @@ def _parse_string(self, name): if sql_result: season_number = int(sql_result[0][0]) episode_numbers = [int(sql_result[0][1])] - logger.debug('Database info for show {name}: Season: {season} Episode(s): {episodes}', - name=result.show.name, season=season_number, episodes=episode_numbers) + + # Use the next query item if we have multiple results + # and the current one is a special episode (season 0) + if season_number == 0 and len(sql_result) > 1: + season_number = int(sql_result[1][0]) + episode_numbers = [int(sql_result[1][1])] + + log.debug( + 'Database info for series {name}: Season: {season} Episode(s): {episodes}', { + 'name': result.show.name, + 'season': season_number, + 'episodes': episode_numbers + } + ) if season_number is None or not episode_numbers: - logger.debug('Show {name} has no season or episodes, using indexer...', name=result.show.name) - indexer_api = indexerApi(result.show.indexer) + log.debug('Series {name} has no season or episodes, using indexer', + {'name': result.show.name}) try: - indexer_api_params = indexer_api.api_params.copy() + indexer_api_params = indexerApi(result.show.indexer).api_params.copy() if result.show.lang: indexer_api_params['language'] = result.show.lang - t = indexerApi(result.show.indexer).indexer(**indexer_api_params) - tv_episode = t[result.show.indexerid].aired_on(result.air_date)[0] + indexer_api = indexerApi(result.show.indexer).indexer(**indexer_api_params) + tv_episode = indexer_api[result.show.indexerid].aired_on(result.air_date)[0] season_number = int(tv_episode['seasonnumber']) episode_numbers = [int(tv_episode['episodenumber'])] - logger.debug('Indexer info for show {name}: {ep}', - name=result.show.name, ep=episode_num(season_number, episode_numbers[0])) + log.debug( + 'Indexer info for series {name}: {ep}', { + 'name': result.show.name, + 'ep': episode_num(season_number, episode_numbers[0]), + } + ) except IndexerEpisodeNotFound: - logger.warning("Unable to find episode with date {date} for show '{name}'. Skipping", - date=result.air_date, name=result.show.name) + log.warning( + 'Unable to find episode with date {date} for series {name}. 
Skipping', + {'date': result.air_date, 'name': result.show.name} + ) episode_numbers = [] - except IndexerError as e: - logger.warning('Unable to contact {indexer_api.name}: {ex!r}', indexer_api=indexer_api, ex=e) + except IndexerError as error: + log.warning( + 'Unable to contact {indexer_api.name}: {error}', + {'indexer_api': indexer_api, 'error': error.message} + ) episode_numbers = [] - except IndexerException as e: - logger.warning('Indexer exception: {indexer_api.name}: {ex!r}', indexer_api=indexer_api, ex=e) + except IndexerException as error: + log.warning( + 'Indexer exception: {indexer_api.name}: {error}', + {'indexer_api': indexer_api, 'error': error.message} + ) episode_numbers = [] for episode_number in episode_numbers: - s = season_number - e = episode_number + season = season_number + episode = episode_number if result.show.is_scene: - (s, e) = scene_numbering.get_indexer_numbering(result.show.indexerid, - result.show.indexer, - season_number, - episode_number) - logger.debug("Scene numbering enabled show '{name}', using indexer numbering: {ep}", - name=result.show.name, ep=episode_num(s, e)) - new_episode_numbers.append(e) - new_season_numbers.append(s) + (season, episode) = scene_numbering.get_indexer_numbering( + result.show.indexerid, + result.show.indexer, + season_number, + episode_number, + ) + log.debug( + 'Scene numbering enabled series {name}, using indexer numbering: {ep}', + {'name': result.show.name, 'ep': episode_num(season, episode)} + ) + new_episode_numbers.append(episode) + new_season_numbers.append(season) elif result.show.is_anime and result.is_anime: - logger.debug("Scene numbering enabled show '{name}' is anime", name=result.show.name) + log.debug('Scene numbering enabled series {name} is anime', + {'name': result.show.name}) scene_season = scene_exceptions.get_scene_exception_by_name(result.series_name)[1] for absolute_episode in result.ab_episode_numbers: a = absolute_episode @@ -149,53 +174,59 @@ def _parse_string(self, name): True, scene_season) # Translate the absolute episode number, back to the indexers season and episode. 
- (s, e) = helpers.get_all_episodes_from_absolute_number(result.show, [a]) - logger.debug("Scene numbering enabled show '{name}' using indexer for absolute {absolute}: {ep}", - name=result.show.name, absolute=a, ep=episode_num(s, e, 'absolute')) + (season, episode) = helpers.get_all_episodes_from_absolute_number(result.show, [a]) + log.debug( + 'Scene numbering enabled series {name} using indexer for absolute {absolute}: {ep}', + {'name': result.show.name, 'absolute': a, 'ep': episode_num(season, episode, 'absolute')} + ) new_absolute_numbers.append(a) - new_episode_numbers.extend(e) - new_season_numbers.append(s) + new_episode_numbers.extend(episode) + new_season_numbers.append(season) elif result.season_number and result.episode_numbers: for episode_number in result.episode_numbers: - s = result.season_number - e = episode_number + season = result.season_number + episode = episode_number if result.show.is_scene: - (s, e) = scene_numbering.get_indexer_numbering(result.show.indexerid, - result.show.indexer, - result.season_number, - episode_number) - logger.debug("Scene numbering enabled show '{name}' using indexer numbering: {ep}", - name=result.show.name, ep=episode_num(s, e)) + (season, episode) = scene_numbering.get_indexer_numbering( + result.show.indexerid, + result.show.indexer, + result.season_number, + episode_number + ) + log.debug( + 'Scene numbering enabled series {name} using indexer numbering: {ep}', + {'name': result.show.name, 'ep': episode_num(season, episode)} + ) if result.show.is_anime: - a = helpers.get_absolute_number_from_season_and_episode(result.show, s, e) + a = helpers.get_absolute_number_from_season_and_episode(result.show, season, episode) if a: new_absolute_numbers.append(a) - logger.debug("Scene numbering enabled anime show '{name}' using indexer with absolute {absolute}: {ep}", - name=result.show.name, absolute=a, ep=episode_num(s, e, 'absolute')) + log.debug( + 'Scene numbering enabled anime {name} using indexer with absolute {absolute}: {ep}', + {'name': result.show.name, 'absolute': a, 'ep': episode_num(season, episode, 'absolute')} + ) - new_episode_numbers.append(e) - new_season_numbers.append(s) + new_episode_numbers.append(episode) + new_season_numbers.append(season) # need to do a quick sanity check heregex. It's possible that we now have episodes # from more than one season (by tvdb numbering), and this is just too much # for the application, so we'd need to flag it. - new_season_numbers = list(set(new_season_numbers)) # remove duplicates + new_season_numbers = sorted(set(new_season_numbers)) # remove duplicates if len(new_season_numbers) > 1: raise InvalidNameException('Scene numbering results episodes from seasons {seasons}, (i.e. more than one) ' 'and Medusa does not support this. Sorry.'.format(seasons=new_season_numbers)) # If guess it's possible that we'd have duplicate episodes too, # so lets eliminate them - new_episode_numbers = list(set(new_episode_numbers)) - new_episode_numbers.sort() + new_episode_numbers = sorted(set(new_episode_numbers)) # maybe even duplicate absolute numbers so why not do them as well - new_absolute_numbers = list(set(new_absolute_numbers)) - new_absolute_numbers.sort() + new_absolute_numbers = sorted(set(new_absolute_numbers)) if new_absolute_numbers: result.ab_episode_numbers = new_absolute_numbers @@ -207,13 +238,17 @@ def _parse_string(self, name): # For anime that we still couldn't get a season, let's assume we should use 1. 
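# ---------------------------------------------------------------------------
# Editor's sketch, not part of the patch: the hunks above convert eager
# "logger.log(u'...'.format(...), logger.DEBUG)" calls to a brace-style adapter
# that receives its format arguments separately, e.g.
#     log.debug(u'message {key}', {'key': value})   or   log.debug(u'{0}', value)
# Medusa's real adapter is medusa.logger.adapters.style.BraceAdapter; the minimal
# stand-in below only illustrates that calling convention and is an assumption
# about its behaviour, not a copy of it.
import logging


class BraceAdapterSketch(object):
    """Format '{name}'-style messages only when the target level is enabled."""

    def __init__(self, logger):
        self.logger = logger

    def _emit(self, level, msg, args):
        if self.logger.isEnabledFor(level):
            if len(args) == 1 and isinstance(args[0], dict):
                formatted = msg.format(**args[0])   # named fields: '{path}', {'path': p}
            else:
                formatted = msg.format(*args)       # positional fields: '{0}', value
            self.logger.log(level, formatted)

    def debug(self, msg, *args):
        self._emit(logging.DEBUG, msg, args)

    def warning(self, msg, *args):
        self._emit(logging.WARNING, msg, args)


log = BraceAdapterSketch(logging.getLogger(__name__))
log.logger.addHandler(logging.NullHandler())
log.debug(u'Scene numbering enabled series {name}, using indexer numbering: {ep}',
          {'name': 'Example Show', 'ep': 'S02E06'})
# ---------------------------------------------------------------------------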
if result.show.is_anime and result.season_number is None and result.episode_numbers: result.season_number = 1 - logger.warn("For this anime show {name}, we couldn't parse a season number, " - "let's assume it's an absolute numbered anime show with season 1", - name=result.show.name) + log.warning( + 'Unable to parse season number for anime {name}, ' + 'assuming absolute numbered anime with season 1', + {'name': result.show.name} + ) if result.show.is_scene: - logger.debug('Converted parsed result {original} into {result}', original=result.original_name, - result=result) + log.debug( + 'Converted parsed result {original} into {result}', + {'original': result.original_name, 'result': result} + ) return result @@ -246,7 +281,7 @@ def parse(self, name, cache_result=True): if cache_result: name_parser_cache.add(name, result) - logger.debug("Parsed '{name}' into {result}", name=name, result=result) + log.debug('Parsed {name} into {result}', {'name': name, 'result': result}) return result @staticmethod @@ -257,11 +292,13 @@ def assert_supported(result): :type result: ParseResult """ if not result.show: - raise InvalidShowException('Unable to match {result.original_name} to a show in your database. ' + raise InvalidShowException('Unable to match {result.original_name} to a series in your database. ' 'Parser result: {result}'.format(result=result)) - logger.debug("Matched release '{release}' to a show in your database: '{name}'", - release=result.original_name, name=result.show.name) + log.debug( + 'Matched release {release} to a series in your database: {name}', + {'release': result.original_name, 'name': result.show.name} + ) if result.season_number is None and not result.episode_numbers and \ result.air_date is None and not result.ab_episode_numbers and not result.series_name: @@ -451,7 +488,7 @@ def get(self, name): :rtype: ParseResult """ if name in self.cache: - logger.debug("Using cached parse result for '{name}'", name=name) + log.debug('Using cached parse result for {name}', {'name': name}) return self.cache[name] diff --git a/medusa/name_parser/rules/__init__.py b/medusa/name_parser/rules/__init__.py index 777a64a1a7..9c807c3f5c 100644 --- a/medusa/name_parser/rules/__init__.py +++ b/medusa/name_parser/rules/__init__.py @@ -1,19 +1,21 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- + """Guessit customization.""" from guessit.api import default_api -from ...name_parser.rules.properties import ( - blacklist, container, format_, language, other, - screen_size, subtitle_language +from medusa.name_parser.rules.properties import ( + blacklist, + container, + format_, + other, + screen_size ) -from ...name_parser.rules.rules import rules +from medusa.name_parser.rules.rules import rules default_api.rebulk.rebulk(blacklist()) default_api.rebulk.rebulk(format_()) default_api.rebulk.rebulk(screen_size()) default_api.rebulk.rebulk(other()) -default_api.rebulk.rebulk(language()) -default_api.rebulk.rebulk(subtitle_language()) default_api.rebulk.rebulk(container()) default_api.rebulk.rebulk(rules()) diff --git a/medusa/name_parser/rules/properties.py b/medusa/name_parser/rules/properties.py index 219aaba215..51178ecea4 100644 --- a/medusa/name_parser/rules/properties.py +++ b/medusa/name_parser/rules/properties.py @@ -1,12 +1,13 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- + """Properties: This section contains additional properties to be guessed by guessit.""" + import re -import babelfish from guessit.reutils import build_or_pattern -from guessit.rules.common import alt_dash, dash -from 
guessit.rules.common.validators import seps, seps_surround +from guessit.rules.common import dash +from guessit.rules.common.validators import seps_surround from rebulk.processors import POST_PROCESS from rebulk.rebulk import Rebulk from rebulk.rules import RemoveMatch, Rule @@ -66,7 +67,6 @@ def screen_size(): rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE) rebulk.defaults(name='screen_size', validator=seps_surround) - rebulk.regex('NetflixUHD', value='2160p') rebulk.regex(r'(?:\d{3,}(?:x|\*))?4320(?:p?x?)', value='4320p') return rebulk @@ -81,61 +81,12 @@ def other(): rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE, abbreviations=[dash]) rebulk.defaults(name='other', validator=seps_surround) - rebulk.regex('DIRFIX', value='DirFix') - rebulk.regex('INTERNAL', value='Internal') - rebulk.regex(r'(?:HD)?iTunes(?:HD)?', value='iTunes') - rebulk.regex(r'UNCENSORED', value='Uncensored') - rebulk.regex(r'MULTi', value='Multi Language') - rebulk.regex('HC', value='Hardcoded subtitles') - rebulk.regex('F1', value='Formula One', conflict_solver=lambda match, other: other if other.name == 'film' else '__default__') # Discarded: rebulk.regex('DownRev', 'small-size', private=True) - rebulk.rules(ValidateHardcodedSubs) - - return rebulk - - -def language(): - """Language property. - - :return: - :rtype: Rebulk - """ - rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE, abbreviations=[dash]) - rebulk.defaults(name='language', validator=seps_surround) - rebulk.regex('SPANISH-?AUDIO', r'(?:Espa[.]ol-)?castellano', value=babelfish.Language('spa')) - rebulk.regex('german-dubbed', 'dubbed-german', value=babelfish.Language('deu')) - rebulk.regex('english-dubbed', value=babelfish.Language('eng')) - rebulk.regex('dublado', value='und', formatter=babelfish.Language) - - return rebulk - - -def subtitle_language(): - """Subtitle language property. - - :return: - :rtype: Rebulk - """ - rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE | re.UNICODE, abbreviations=[alt_dash]) - rebulk.defaults(name='subtitle_language', validator=seps_surround) - - # special handling - rebulk.regex(r'Legenda(?:s|do)?@PT-?BR', value=babelfish.Language('por', 'BR')) - rebulk.regex(r'Legenda(?:s|do)?@PT(?!-?BR)', value=babelfish.Language('por')) - rebulk.regex('Subtitulado@?ESP(?:a[nñ]ol)?@?Spanish', 'Subtitulado@?ESP(?:a[nñ]ol)?', value=babelfish.Language('spa'), - conflict_solver=lambda match, other: other if other.name == 'language' else '__default__') - - # undefined language - rebulk.regex('Subtitles', 'Legenda(?:s|do)', 'Subbed', 'Sub(?:title)?s?@Latino', - value='und', formatter=babelfish.Language, tags='subtitle.undefined') - - rebulk.rules(RemoveSubtitleUndefined) - return rebulk @@ -170,69 +121,6 @@ def container(): return rebulk -class ValidateHardcodedSubs(Rule): - """Validate HC matches.""" - - priority = 32 - consequence = RemoveMatch - - def when(self, matches, context): - """Remove `other: Hardcoded subtitles` if there's no subtitle_language matches as a neighbour. 
- - :param matches: - :type matches: rebulk.match.Matches - :param context: - :type context: dict - :return: - """ - to_remove = [] - for hc in matches.named('other', predicate=lambda match: match.value == 'Hardcoded subtitles'): - next_match = matches.next(hc, predicate=lambda match: match.name == 'subtitle_language', index=0) - if next_match and not matches.holes(hc.end, next_match.start, - predicate=lambda match: match.value.strip(seps)): - continue - - previous_match = matches.previous(hc, predicate=lambda match: match.name == 'subtitle_language', index=0) - if previous_match and not matches.holes(previous_match.end, hc.start, - predicate=lambda match: match.value.strip(seps)): - continue - - to_remove.append(hc) - - return to_remove - - -class RemoveSubtitleUndefined(Rule): - """Remove subtitle undefined when there's an actual subtitle language.""" - - priority = POST_PROCESS - 1000 - consequence = RemoveMatch - - def when(self, matches, context): - """Remove subtitle undefined if there's a subtitle language as a neighbor. - - :param matches: - :type matches: rebulk.match.Matches - :param context: - :type context: dict - :return: - """ - to_remove = [] - for und in matches.tagged('subtitle.undefined'): - next_match = matches.next(und, predicate=lambda match: match.name == 'subtitle_language', index=0) - if not next_match or matches.holes(und.end, next_match.start, - predicate=lambda match: match.value.strip(seps)): - previous_match = matches.previous(und, - predicate=lambda match: match.name == 'subtitle_language', index=0) - if not previous_match or matches.holes(previous_match.end, und.start, - predicate=lambda match: match.value.strip(seps)): - continue - - to_remove.append(und) - - return to_remove - - class ValidateBlacklist(Rule): """Validate blacklist pattern 03. It should appear after a container.""" diff --git a/medusa/name_parser/rules/rules.py b/medusa/name_parser/rules/rules.py index 4a802c039c..f1282afbce 100644 --- a/medusa/name_parser/rules/rules.py +++ b/medusa/name_parser/rules/rules.py @@ -1,5 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- + """Rules: This section contains rules that enhances guessit behavior. Coding guidelines: @@ -25,6 +26,7 @@ have a fixed execution order, that's why the rules() method should add the rules in the correct order (explicit). *** Rebulk API relies on the match.value, if you change them you'll get exceptions. """ + import copy import logging import re @@ -38,11 +40,10 @@ from rebulk.rules import AppendMatch, RemoveMatch, RenameMatch, Rule -logger = logging.getLogger(__name__) +log = logging.getLogger(__name__) simple_separator = ('.', 'and', ',.', '.,', '.,.', ',') range_separator = ('-', '~', '_-_', 'to', '.to.') -episode_range_separator = range_separator + ('_-_e', '-e', '.to.e', '_to_e') class BlacklistedReleaseGroup(Rule): @@ -64,78 +65,6 @@ def when(self, matches, context): return matches.named('release_group', predicate=lambda match: match.value.lower() in self.blacklist) -class EpisodeNumberRule(Rule): - """Episode numbers in episode title are wrongly detected as episodes. 
- - guessit -t episode "Show Name - S02E31 - Episode 55 (720p.HDTV)" - - Before the rule: - For: Show Name - S02E31 - Episode 55 (720p.HDTV) - GuessIt found: { - "title": "Show Name", - "season": 2, - "episode": [ - 31, - 55 - ], - "screen_size": "720p", - "format": "HDTV", - "type": "episode" - } - - After the rule: - For: Show Name - S02E31 - Episode 55 (720p.HDTV) - GuessIt found: { - "title": "Show Name", - "season": 2, - "episode": 31, - "screen_size": "720p", - "format": "HDTV", - "type": "episode" - } - """ - - priority = POST_PROCESS - consequence = [RemoveMatch, AppendMatch] - - def when(self, matches, context): - """Evaluate the rule. - - :param matches: - :type matches: rebulk.match.Matches - :param context: - :type context: dict - :return: - """ - fileparts = matches.markers.named('path') - for filepart in marker_sorted(fileparts, matches): - episodes = matches.range(filepart.start, filepart.end, predicate=lambda match: match.name == 'episode') - if len(episodes) < 2: - continue - - strong_episodes = [m for m in episodes if 'SxxExx' in m.tags] - weak_episodes = [m for m in episodes if 'SxxExx' not in m.tags] - if not strong_episodes or not weak_episodes: - continue - - numbers = [m.value for m in episodes] - # check if we have consecutive numbers - if sorted(numbers) == range(min(numbers), max(numbers) + 1): - continue - - to_remove = weak_episodes - to_append = [] - for e in weak_episodes: - if matches.previous(e, lambda match: match.name == 'episode' and 'SxxExx' in match.tags): - episode_title = copy.copy(e.initiator) - episode_title.name = 'episode_title' - episode_title.private = False - to_append.append(episode_title) - break - - return to_remove, to_append - - class FixAnimeReleaseGroup(Rule): """Choose the correct Anime release group. @@ -217,144 +146,6 @@ def when(self, matches, context): return to_remove, to_append -class SpanishNewpctReleaseName(Rule): - """Detect newpct release names. - - This rule is to handle the newpct release name style. - - e.g.: Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT - - guessit -t episode "Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT" - - without this rule: - For: Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT - GuessIt found: { - "title": "Show Name", - "alternative_title": "Temporada", - "episode": [ - 1, - 2 - ], - "screen_size": "720p", - "format": "HDTV", - "video_codec": "h264", - "season": 1, - "language": "Spanish", - "episode_title": "AUDIO-NEWPCT", - "type": "episode" - } - - - with this rule: - For: Show.Name.-.Temporada.1.720p.HDTV.x264[Cap.102]SPANISH.AUDIO-NEWPCT - GuessIt found: { - "title": "Show Name", - "season": 1, - "episode": 2 - "screen_size": "720p", - "format": "HDTV", - "video_codec": "h264", - "language": "Spanish", - "release_group": "NEWPCT" - "type": "episode" - } - - """ - - priority = POST_PROCESS - consequence = [RemoveMatch, AppendMatch, RenameMatch('title')] - season_re = re.compile(r'^tem(p|porada)?\W*\d*$', flags=re.IGNORECASE) - prefix = '[cap.' - episode_re = re.compile(r'^\[cap\.(?P\d{1,2})(?P\d{2})' - r'(_((?P\d{1,2})(?P\d{2})))?.*\]', flags=re.IGNORECASE) - - def when(self, matches, context): - """Evaluate the rule. 
- - :param matches: - :type matches: rebulk.match.Matches - :param context: - :type context: dict - :return: - """ - season = matches.named('season', index=0) - if not season: - return - - alternative_titles = matches.named('alternative_title', - predicate=lambda match: self.season_re.match(match.value.lower())) - episode_titles = matches.named('episode_title', - predicate=lambda match: self.season_re.match(match.value.lower())) - - # skip if there isn't an alternative_title or episode_title with the word season in spanish - if not alternative_titles and not episode_titles: - return - - to_remove = [] - to_rename = [] - - titles = matches.named('title', predicate=lambda match: self.season_re.match(match.value.lower())) - if titles: - to_remove.extend(titles) - if not episode_titles: - to_rename.extend(matches.named('episode_title')) - - fileparts = matches.markers.named('path') - for filepart in marker_sorted(fileparts, matches): - # retrieve all groups - groups = matches.markers.range(filepart.start, filepart.end, predicate=lambda mk: mk.name == 'group') - for group in groups: - # then search the season and episode numbers: [Cap.102_103] - m = self.episode_re.search(group.raw) - g = m.groupdict() if m else None - # if found and the season numbers match... - if not g or int(g['season']) != season.value or ( - g['end_season'] and int(g['end_season']) != season.value): - continue - - if not context.get('show_type'): - # fix the show_type as this is not anime - context['show_type'] = 'normal' - - to_append = [] - - # remove "[Cap.] match, if any - to_remove.extend(matches.range(group.start, group.start + len(self.prefix))) - # remove the wrong alternative title - to_remove.extend(alternative_titles) - # remove the wrong episode title - to_remove.extend(episode_titles) - to_remove.extend(matches.range(filepart.start, filepart.end, predicate=lambda match: - match.name == 'episode_title' and - match.value.lower() == 'audio')) - # remove all episode matches, since we're rebuild them - to_remove.extend(matches.named('episode')) - - first_ep_num = int(g['episode']) - last_ep_num = int(g['end_episode']) if g['end_episode'] else first_ep_num - if 0 <= first_ep_num <= last_ep_num < 100: - start_index = group.start + len(g['season']) + len(self.prefix) - - # rebuild all episode matches - for ep_num in range(first_ep_num, last_ep_num + 1): - new_episode = copy.copy(season) - new_episode.name = 'episode' - new_episode.tags = ['newpct'] - new_episode.value = ep_num - if ep_num == first_ep_num: - new_episode.start = start_index - new_episode.end = new_episode.start + len(g['episode']) - elif ep_num != last_ep_num: - new_episode.start = start_index + len(g['episode']) - new_episode.end = new_episode.start + 1 - else: - new_episode.start = start_index + len(g['episode']) + len(g['end_season']) + 1 - new_episode.end = new_episode.start + len(g['end_episode']) - to_append.append(new_episode) - - return to_remove, to_append, to_rename - - class FixSeasonRangeWithGap(Rule): """Fix season range with gap. @@ -422,150 +213,6 @@ def when(self, matches, context): return to_append, to_remove -class RemoveInvalidEpisodes(Rule): - """Remove invalid episodes. 
- - guessit -t episode "Show.Name.S02E06.eps2.4.m4ster-s1ave.aes.1080p.AMZN.WEBRip.DD5.1.x264-GROUP" - - Without this fix: - For: Show.Name.S02E06.eps2.4.m4ster-s1ave.aes.1080p.AMZN.WEBRip.DD5.1.x264-GROUP - GuessIt found: { - "title": "Show Name", - "season": [ - 2, - 4, - 1 - ], - "episode": 6, - "episode_title": "eps2", - "screen_size": "1080p", - "format": "WEBRip", - "audio_codec": "DolbyDigital", - "audio_channels": "5.1", - "video_codec": "h264", - "release_group": "GROUP", - "type": "episode" - } - - - with this fix: - For: Show.Name.S02E06.eps2.4.m4ster-s1ave.aes.1080p.AMZN.WEBRip.DD5.1.x264-GROUP - GuessIt found: { - "title": "Show Name", - "season": 2, - "episode": 6, - "episode_title": "eps2", - "screen_size": "1080p", - "format": "WEBRip", - "audio_codec": "DolbyDigital", - "audio_channels": "5.1", - "video_codec": "h264", - "release_group": "GROUP", - "type": "episode" - } - """ - - priority = POST_PROCESS - consequence = RemoveMatch - - def when(self, matches, context): - """Evaluate the rule. - - :param matches: - :type matches: rebulk.match.Matches - :param context: - :type context: dict - :return: - """ - to_remove = [] - fileparts = matches.markers.named('path') - for filepart in marker_sorted(fileparts, matches): - episode = matches.range(filepart.start, filepart.end, index=0, - predicate=lambda match: match.name == 'episode' and 'SxxExx' in match.tags) - if not episode: - continue - - seasons = matches.range(filepart.start, filepart.end, - predicate=lambda match: match.name == 'season' and match.initiator != episode.initiator) - episodes = matches.range(filepart.start, filepart.end, - predicate=(lambda match: match.name == 'episode' and - 'SxxExx' not in match.tags and match.initiator.raw.startswith('eps'))) # mr robot episode titles - - to_remove.extend(seasons) - to_remove.extend(episodes) - - return to_remove - - -class FixSeasonAndEpisodeConflicts(Rule): - """Fix season and episode conflict. - - - Fix release group conflict with episode and or season. - - Certain release names contains a conflicting screen_size (e.g.: 720 without p). It confuses guessit: the guessed - season and episode needs to be removed. - Bug: https://github.com/guessit-io/guessit/issues/308 - - e.g.: "Show.Name.S02.REPACK.720p.BluRay.DD5.1.x264-4EVERHD" - "[SuperGroup].Show.Name.-.06.[720.Hi10p][1F5578AC]" - - guessit -t episode -G 4EVERHD "Show.Name.S02.REPACK.720p.BluRay.DD5.1.x264-4EVERHD" - guessit -t episode "[SuperGroup].Show.Name.-.06.[720.Hi10p][1F5578AC]" - - without this fix: - For: [SuperGroup].Show.Name.-.06.[720.Hi10p][1F5578AC] - GuessIt found: { - "release_group": "SuperGroup", - "title": "Show Name", - "episode": [ - 6, - 20 - ], - "season": 7, - "screen_size": "720p", - "video_profile": "10bit", - "crc32": "1F5578AC", - "type": "episode" - } - - with this fix: - For: [SuperGroup].Show.Name.-.06.[720.Hi10p][1F5578AC] - GuessIt found: { - "release_group": "SuperGroup", - "title": "Show Name", - "episode": 6, - "screen_size": "720p", - "video_profile": "10bit", - "crc32": "1F5578AC", - "type": "episode" - } - - """ - - priority = POST_PROCESS - consequence = RemoveMatch - - def when(self, matches, context): - """Evaluate the rule. 
- - :param matches: - :type matches: rebulk.match.Matches - :param context: - :type context: dict - :return: - """ - to_remove = [] - - screen_sizes = matches.named('screen_size') - for screen_size in screen_sizes: - to_remove.extend(matches.range(screen_size.start, screen_size.end, predicate=lambda match: match.name in ('season', 'episode'))) - - release_groups = matches.named('release_group') - for group in release_groups: - to_remove.extend(matches.range(group.start, group.end, predicate=lambda match: match.name in ('season', 'episode'))) - - return to_remove - - class FixInvalidTitleOrAlternativeTitle(Rule): """Fix invalid title/alternative title due to absolute episode numbers range. @@ -715,7 +362,6 @@ class CreateAliasWithAlternativeTitles(Rule): priority = POST_PROCESS consequence = AppendMatch - blacklist = ('temporada', 'temp', 'tem') def when(self, matches, context): """Evaluate the rule. @@ -736,10 +382,6 @@ def when(self, matches, context): if not title: continue - if matches.range(filepart.start, filepart.end, predicate=lambda match: - (match.name == 'alternative_title' and match.value.lower() in self.blacklist)): - continue - alternative_titles = matches.range(filepart.start, filepart.end, predicate=lambda match: match.name == 'alternative_title') if not alternative_titles: @@ -963,7 +605,7 @@ def when(self, matches, context): :type context: dict :return: """ - if context.get('show_type') == 'normal' or not matches.tagged('anime') or matches.tagged('newpct'): + if context.get('show_type') == 'normal' or not matches.tagged('anime'): return fileparts = matches.markers.named('path') @@ -1061,7 +703,7 @@ def when(self, matches, context): """ weak_duplicate = matches.tagged('weak-duplicate', index=0) # only for shows that seems to be animes - if context.get('show_type') == 'normal' or not weak_duplicate or matches.tagged('newpct'): + if context.get('show_type') == 'normal' or not weak_duplicate: return # if it's not detected as anime and season (weak_duplicate) is not 0, then skip. 
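# ---------------------------------------------------------------------------
# Editor's sketch, not part of the patch: how these rules are exercised. Medusa
# registers its custom properties and rules on guessit's default_api (see the
# medusa/name_parser/rules/__init__.py hunk earlier in this diff), so the
# programmatic equivalent of the "guessit -t episode ..." examples quoted in the
# docstrings is roughly the call below. Treat the 'show_type' key as an
# assumption: it is a Medusa-specific option that the rules read back through
# context.get('show_type').
from guessit.api import default_api

guess = default_api.guessit(
    'Show.Name.S02E06.720p.HDTV.x264-GROUP',
    {'type': 'episode', 'show_type': 'normal'},
)
# 'guess' is an ordered mapping; for this name it should hold roughly
#   title='Show Name', season=2, episode=6, screen_size='720p', format='HDTV'
# ---------------------------------------------------------------------------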
@@ -1175,7 +817,7 @@ def when(self, matches, context): :return: """ # if it seems to be anime and it doesn't have season - if context.get('show_type') != 'normal' and not matches.named('season') and not matches.tagged('newpct'): + if context.get('show_type') != 'normal' and not matches.named('season'): episodes = matches.named('episode') to_remove = [] to_append = [] @@ -1580,8 +1222,8 @@ def when(self, matches, context): to_remove.extend(matches.named('title', predicate=lambda match: match.value != values[0].value)) continue - logger.info(u"Guessed more than one '%s' for '%s': %s", - name, matches.input_string, u','.join(unique_values), exc_info=False) + log.info(u"Guessed more than one '%s' for '%s': %s", + name, matches.input_string, u','.join(unique_values), exc_info=False) to_remove.extend(values) return to_remove @@ -1630,16 +1272,13 @@ class ReleaseGroupPostProcessor(Rule): # italian release: drop everything after [CURA] re.compile(r'\[CURA\].*$', flags=re.IGNORECASE), - # NLSubs-word - re.compile(r'\W*\b([a-z]{1,3}[\.\-]?)?(subs?)\b\W*', flags=re.IGNORECASE), - # https://github.com/guessit-io/guessit/issues/302 - re.compile(r'\W*\b(obfuscated|dual|audio)\b\W*', flags=re.IGNORECASE), - re.compile(r'\W*\b(vtv|sd|avc|rp|norar|re\-?up(loads?)?)\b\W*', flags=re.IGNORECASE), + re.compile(r'\W*\b(obfuscated)\b\W*', flags=re.IGNORECASE), + re.compile(r'\W*\b(vtv|sd|rp|norar|re-?up(loads?)?)\b\W*', flags=re.IGNORECASE), re.compile(r'\W*\b(hebits)\b\W*', flags=re.IGNORECASE), # [word], (word), {word} - re.compile(r'(?<=.)\W*[\[\(\{].+[\}\)\]]?\W*$', flags=re.IGNORECASE), + re.compile(r'(?<=.)\W*[\[({].+[\})\]]?\W*$', flags=re.IGNORECASE), # https://github.com/guessit-io/guessit/issues/301 # vol255+101 @@ -1658,10 +1297,10 @@ class ReleaseGroupPostProcessor(Rule): re.compile(r'(?<=[a-z0-9]{3})\.([a-z]\d{2,3})$', flags=re.IGNORECASE), # word-1234, word-456 - re.compile(r'(?<=[a-z0-9]{3})\-(\d{3,4})$', flags=re.IGNORECASE), + re.compile(r'(?<=[a-z0-9]{3})-(\d{3,4})$', flags=re.IGNORECASE), # word-fansub - re.compile(r'(?<=[a-z0-9]{3})\-((fan)?sub(s)?)$', flags=re.IGNORECASE), + re.compile(r'(?<=[a-z0-9]{3})-((fan)?sub(s)?)$', flags=re.IGNORECASE), # ...word re.compile(r'^\W+', flags=re.IGNORECASE), @@ -1719,13 +1358,9 @@ def rules(): return Rebulk().rules( BlacklistedReleaseGroup, FixTvChaosUkWorkaround, - EpisodeNumberRule, FixAnimeReleaseGroup, - SpanishNewpctReleaseName, FixInvalidTitleOrAlternativeTitle, - FixSeasonAndEpisodeConflicts, FixSeasonRangeWithGap, - RemoveInvalidEpisodes, AnimeWithSeasonAbsoluteEpisodeNumbers, AnimeAbsoluteEpisodeNumbers, AbsoluteEpisodeNumbers, diff --git a/medusa/notifiers/__init__.py b/medusa/notifiers/__init__.py index 5eba8db7da..09de08a57b 100644 --- a/medusa/notifiers/__init__.py +++ b/medusa/notifiers/__init__.py @@ -1,26 +1,38 @@ # coding=utf-8 -# Author: Dustyn Gibson - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - -from .. 
import app -from ..notifiers import boxcar2, emailnotify, emby, freemobile, growl, kodi, libnotify, nma, nmj, nmjv2, plex, prowl, pushalot, pushbullet, pushover, \ - pytivo, synoindex, synology_notifier, telegram, trakt, tweet +import logging +import socket + +from medusa import app +from medusa.logger.adapters.style import BraceAdapter +from medusa.notifiers import ( + boxcar2, + emailnotify, + emby, + freemobile, + growl, + kodi, + libnotify, + nma, + nmj, + nmjv2, + plex, + prowl, + pushalot, + pushbullet, + pushover, + pytivo, + synoindex, + synology_notifier, + telegram, + trakt, + tweet, +) + +from requests.exceptions import RequestException + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) # home theater / nas kodi_notifier = kodi.Notifier() @@ -74,26 +86,41 @@ def notify_download(ep_name): for n in notifiers: - n.notify_download(ep_name) + try: + n.notify_download(ep_name) + except (RequestException, socket.gaierror) as error: + log.debug(u'Unable to send download notification. Error: {0}', error.message) def notify_subtitle_download(ep_name, lang): for n in notifiers: - n.notify_subtitle_download(ep_name, lang) + try: + n.notify_subtitle_download(ep_name, lang) + except (RequestException, socket.gaierror) as error: + log.debug(u'Unable to send download notification. Error: {0}', error.message) def notify_snatch(ep_name, is_proper): for n in notifiers: - n.notify_snatch(ep_name, is_proper) + try: + n.notify_snatch(ep_name, is_proper) + except (RequestException, socket.gaierror) as error: + log.debug(u'Unable to send snatch notification. Error: {0}', error.message) def notify_git_update(new_version=""): for n in notifiers: if app.NOTIFY_ON_UPDATE: - n.notify_git_update(new_version) + try: + n.notify_git_update(new_version) + except (RequestException, socket.gaierror) as error: + log.debug(u'Unable to send new update notification. Error: {0}', error.message) def notify_login(ipaddress): for n in notifiers: if app.NOTIFY_ON_LOGIN: - n.notify_login(ipaddress) + try: + n.notify_login(ipaddress) + except (RequestException, socket.gaierror) as error: + log.debug(u'Unable to new login notification. Error: {0}', error.message) diff --git a/medusa/notifiers/boxcar2.py b/medusa/notifiers/boxcar2.py index fa3ecaac7c..cca397a753 100644 --- a/medusa/notifiers/boxcar2.py +++ b/medusa/notifiers/boxcar2.py @@ -1,28 +1,17 @@ # coding=utf-8 -# Author: Rafael Silva -# Author: Marvin Pinto -# Author: Dennis Lutter -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . """Boxcar2 module.""" + from __future__ import unicode_literals -from .. 
import app, common, logger -from ..helpers import get_url, make_session +import logging + +from medusa import app, common +from medusa.helpers import get_url, make_session +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -60,10 +49,10 @@ def _send_boxcar2(self, msg, title, accesstoken): response = get_url(self.url, post_data=post_data, session=self.session, timeout=60, returns='json') if not response: - logger.log('Boxcar2 notification failed.', logger.ERROR) + log.error('Boxcar2 notification failed.') return False - logger.log('Boxcar2 notification successful.', logger.DEBUG) + log.debug('Boxcar2 notification successful.') return True def notify_snatch(self, ep_name, is_proper): @@ -103,11 +92,11 @@ def _notify_boxcar2(self, title, message, accesstoken=None): accesstoken: to send to this device """ if not app.USE_BOXCAR2: - logger.log('Notification for Boxcar2 not enabled, skipping this notification', logger.DEBUG) + log.debug('Notification for Boxcar2 not enabled, skipping this notification') return False accesstoken = accesstoken or app.BOXCAR2_ACCESSTOKEN - logger.log('Sending notification for {}'.format(message), logger.DEBUG) + log.debug('Sending notification for {0}', message) return self._send_boxcar2(message, title, accesstoken) diff --git a/medusa/notifiers/emailnotify.py b/medusa/notifiers/emailnotify.py index a6817f32f6..cd4e11b93a 100644 --- a/medusa/notifiers/emailnotify.py +++ b/medusa/notifiers/emailnotify.py @@ -1,37 +1,21 @@ # coding=utf-8 -# Authors: -# Derek Battams -# Pedro Jose Pereira Vieito (@pvieito) -# -# This file is part of medusa. -# -# medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with medusa. If not, see . -# -############################################################################## - from __future__ import unicode_literals import ast +import logging import re import smtplib from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email.utils import formatdate -from .. import app, db, logger -from ..helper.encoding import ss +from medusa import app, db +from medusa.helper.encoding import ss +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -62,14 +46,14 @@ def notify_snatch(self, ep_name, is_proper, title='Snatched:'): # pylint: disab show = self._parseEp(ep_name) to = self._generate_recipients(show) if not to: - logger.log('Skipping email notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping email notify because there are no configured recipients') else: try: msg = MIMEMultipart('alternative') msg.attach(MIMEText( '' '

                        [HTML e-mail template elided (markup lost in extraction): heading 'Medusa Notification - Snatched', body fields 'Show:' and 'Episode:', footer 'Powered by Medusa.'; the hunk only replaces the auto-numbered '{}' placeholders with explicit '{0}' / '{1}']
'.format @@ -91,9 +75,10 @@ def notify_snatch(self, ep_name, is_proper, title='Snatched:'): # pylint: disab msg[b'Date'] = formatdate(localtime=True) if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): - logger.log('Snatch notification sent to [{}] for "{}"'.format(to, ep_name), logger.DEBUG) + log.debug('Snatch notification sent to {recipient} for {episode}', + {'recipient': to, 'episode': ep_name}) else: - logger.log('Snatch notification error: {}'.format(self.last_err), logger.WARNING) + log.warning('Snatch notification error: {0}', self.last_err) def notify_download(self, ep_name, title='Completed:'): # pylint: disable=unused-argument """ @@ -108,14 +93,14 @@ def notify_download(self, ep_name, title='Completed:'): # pylint: disable=unuse show = self._parseEp(ep_name) to = self._generate_recipients(show) if not to: - logger.log('Skipping email notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping email notify because there are no configured recipients') else: try: msg = MIMEMultipart('alternative') msg.attach(MIMEText( '' '

                        [HTML e-mail template elided (markup lost in extraction): heading 'Medusa Notification - Downloaded', body fields 'Show:' and 'Episode:', footer 'Powered by Medusa.'; the hunk only replaces the auto-numbered '{}' placeholders with explicit '{0}' / '{1}']
'.format @@ -137,9 +122,10 @@ def notify_download(self, ep_name, title='Completed:'): # pylint: disable=unuse msg[b'Date'] = formatdate(localtime=True) if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): - logger.log('Download notification sent to [{}] for "{}"'.format(to, ep_name), logger.DEBUG) + log.debug('Download notification sent to {recipient} for {episode}', + {'recipient': to, 'episode': ep_name}) else: - logger.log('Download notification error: {}'.format(self.last_err), logger.WARNING) + log.warning('Download notification error: {0}', self.last_err) def notify_subtitle_download(self, ep_name, lang, title='Downloaded subtitle:'): # pylint: disable=unused-argument """ @@ -154,15 +140,15 @@ def notify_subtitle_download(self, ep_name, lang, title='Downloaded subtitle:'): show = self._parseEp(ep_name) to = self._generate_recipients(show) if not to: - logger.log('Skipping email notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping email notify because there are no configured recipients') else: try: msg = MIMEMultipart('alternative') msg.attach(MIMEText( '' '

                        [HTML e-mail template elided (markup lost in extraction): heading 'Medusa Notification - Subtitle Downloaded', body fields 'Show:', 'Episode:' and 'Language:', footer 'Powered by Medusa.'; the hunk only replaces the auto-numbered '{}' placeholders with explicit '{0}' / '{1}' / '{2}']
'.format @@ -182,9 +168,10 @@ def notify_subtitle_download(self, ep_name, lang, title='Downloaded subtitle:'): msg[b'To'] = ','.join(to) if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): - logger.log('Download notification sent to [{}] for "{}"'.format(to, ep_name), logger.DEBUG) + log.debug('Download notification sent to {recipient} for {episode}', + {'recipient': to, 'episode': ep_name}) else: - logger.log('Download notification error: {}'.format(self.last_err), logger.WARNING) + log.warning('Download notification error: {0}', self.last_err) def notify_git_update(self, new_version='??'): """ @@ -194,14 +181,14 @@ def notify_git_update(self, new_version='??'): if app.USE_EMAIL: to = self._generate_recipients(None) if not to: - logger.log('Skipping email notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping email notify because there are no configured recipients') else: try: msg = MIMEMultipart('alternative') msg.attach(MIMEText( '' '

                        [HTML e-mail template elided (markup lost in extraction): heading 'Medusa Notification - Updated', body field 'Commit:', footer 'Powered by Medusa.'; the hunk only replaces the auto-numbered '{}' placeholder with explicit '{0}']
'.format @@ -213,15 +200,16 @@ def notify_git_update(self, new_version='??'): except Exception: msg = MIMEText('Medusa updated') - msg[b'Subject'] = 'Updated: {}'.format(new_version) + msg[b'Subject'] = 'Updated: {0}'.format(new_version) msg[b'From'] = app.EMAIL_FROM msg[b'To'] = ','.join(to) msg[b'Date'] = formatdate(localtime=True) if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): - logger.log('Update notification sent to [{}]'.format(to), logger.DEBUG) + log.debug('Update notification sent to {recipient}', + {'recipient': to}) else: - logger.log('Update notification error: {}'.format(self.last_err), logger.WARNING) + log.warning('Update notification error: {0}', self.last_err) def notify_login(self, ipaddress=''): """ @@ -231,7 +219,7 @@ def notify_login(self, ipaddress=''): if app.USE_EMAIL: to = self._generate_recipients(None) if not to: - logger.log('Skipping email notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping email notify because there are no configured recipients') else: try: msg = MIMEMultipart('alternative') @@ -250,15 +238,15 @@ def notify_login(self, ipaddress=''): except Exception: msg = MIMEText('Medusa Remote Login') - msg[b'Subject'] = 'New Login from IP: {}'.format(ipaddress) + msg[b'Subject'] = 'New Login from IP: {0}'.format(ipaddress) msg[b'From'] = app.EMAIL_FROM msg[b'To'] = ','.join(to) msg[b'Date'] = formatdate(localtime=True) if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): - logger.log('Login notification sent to [{}]'.format(to), logger.DEBUG) + log.debug('Login notification sent to {recipient}', {'recipient': to}) else: - logger.log('Login notification error: {}'.format(self.last_err), logger.WARNING) + log.warning('Login notification error: {0}', self.last_err) @staticmethod def _generate_recipients(show): # pylint: disable=too-many-branches @@ -274,7 +262,11 @@ def _generate_recipients(show): # pylint: disable=too-many-branches # Grab the per-show-notification recipients if show is not None: for s in show: - for subs in main_db_con.select('SELECT notify_list FROM tv_shows WHERE show_name = ?', (s,)): + for subs in main_db_con.select( + 'SELECT notify_list ' + 'FROM tv_shows ' + 'WHERE show_name = ?', + (s,)): if subs[b'notify_list']: if subs[b'notify_list'][0] == '{': entries = dict(ast.literal_eval(subs[b'notify_list'])) @@ -287,39 +279,49 @@ def _generate_recipients(show): # pylint: disable=too-many-branches addrs.append(addr) addrs = set(addrs) - logger.log('Notification recipients: {}'.format(addrs), logger.DEBUG) + log.debug('Notification recipients: {0}', addrs) return addrs def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtpDebug=False): # pylint: disable=too-many-arguments - logger.log('HOST: {}; PORT: {}; FROM: {}, TLS: {}, USER: {}, PWD: {}, TO: {}'.format( - host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG) + log.debug( + 'HOST: {host}; PORT: {port}; FROM: {sender}, TLS: {tls},' + ' USER: {user}, PWD: {password}, TO: {recipient}', { + 'host': host, + 'port': port, + 'sender': smtp_from, + 'tls': use_tls, + 'user': user, + 'password': pwd, + 'recipient': to, + } + ) try: srv = smtplib.SMTP(host, int(port)) - except Exception as e: - logger.log('Exception generated while sending e-mail: ' + str(e), logger.WARNING) + except Exception as error: + log.warning('Exception generated while sending e-mail: {0}', error) # 
logger.log(traceback.format_exc(), logger.DEBUG) - self.last_err = '{}'.format(e) + self.last_err = '{0}'.format(error) return False if smtpDebug: srv.set_debuglevel(1) try: if use_tls in ('1', True) or (user and pwd): - logger.log('Sending initial EHLO command!', logger.DEBUG) + log.debug('Sending initial EHLO command!') srv.ehlo() if use_tls in ('1', True): - logger.log('Sending STARTTLS command!', logger.DEBUG) + log.debug('Sending STARTTLS command!') srv.starttls() srv.ehlo() if user and pwd: - logger.log('Sending LOGIN command!', logger.DEBUG) + log.debug('Sending LOGIN command!') srv.login(user.encode('utf-8'), pwd.encode('utf-8')) srv.sendmail(smtp_from, to, msg.as_string()) srv.quit() return True - except Exception as e: - self.last_err = '{}'.format(e) + except Exception as error: + self.last_err = '{0}'.format(error) return False @staticmethod @@ -329,5 +331,5 @@ def _parseEp(ep_name): sep = ' - ' titles = ep_name.split(sep) titles.sort(key=len, reverse=True) - logger.log('TITLES: {}'.format(titles), logger.DEBUG) + log.debug('TITLES: {0}', titles) return titles diff --git a/medusa/notifiers/emby.py b/medusa/notifiers/emby.py index aa984efb7d..5e284b295e 100644 --- a/medusa/notifiers/emby.py +++ b/medusa/notifiers/emby.py @@ -1,29 +1,18 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - import json +import logging + +from medusa import app +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter from requests.compat import urlencode from six.moves.urllib.error import URLError from six.moves.urllib.request import Request, urlopen -from .. 
import app, logger -from ..helper.exceptions import ex + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -54,11 +43,12 @@ def _notify_emby(self, message, host=None, emby_apikey=None): result = response.read() response.close() - logger.log(u'EMBY: HTTP response: ' + result.replace('\n', ''), logger.DEBUG) + log.debug(u'EMBY: HTTP response: {0}', result.replace('\n', '')) return True - except (URLError, IOError) as e: - logger.log(u'EMBY: Warning: Couldn\'t contact Emby at ' + url + ' ' + ex(e), logger.WARNING) + except (URLError, IOError) as error: + log.warning(u'EMBY: Warning: Unable to contact Emby at {url}: {error}', + {'url': url, 'error': ex(error)}) return False @@ -80,17 +70,17 @@ def update_library(self, show=None): if app.USE_EMBY: if not app.EMBY_HOST: - logger.log(u'EMBY: No host specified, check your settings', logger.DEBUG) + log.debug(u'EMBY: No host specified, check your settings') return False if show: if show.indexer == 1: provider = 'tvdb' elif show.indexer == 2: - logger.log(u'EMBY: TVRage Provider no longer valid', logger.WARNING) + log.warning(u'EMBY: TVRage Provider no longer valid') return False else: - logger.log(u'EMBY: Provider unknown', logger.WARNING) + log.warning(u'EMBY: Provider unknown') return False query = '?%sid=%s' % (provider, show.indexerid) else: @@ -107,9 +97,10 @@ def update_library(self, show=None): result = response.read() response.close() - logger.log(u'EMBY: HTTP response: ' + result.replace('\n', ''), logger.DEBUG) + log.debug(u'EMBY: HTTP response: {0}', result.replace('\n', '')) return True - except (URLError, IOError) as e: - logger.log(u'EMBY: Warning: Couldn\'t contact Emby at ' + url + ' ' + ex(e), logger.WARNING) + except (URLError, IOError) as error: + log.warning(u'EMBY: Warning: Unable to contact Emby at {url}: {error}', + {'url': url, 'error': ex(error)}) return False diff --git a/medusa/notifiers/freemobile.py b/medusa/notifiers/freemobile.py index cdf2d03b5f..b79ab53ff0 100644 --- a/medusa/notifiers/freemobile.py +++ b/medusa/notifiers/freemobile.py @@ -1,34 +1,30 @@ # coding=utf-8 -# Author: Marvin Pinto -# Author: Dennis Lutter -# Author: Aaron Bieber -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +import logging +from medusa import app +from medusa.common import ( + NOTIFY_DOWNLOAD, + NOTIFY_GIT_UPDATE, + NOTIFY_GIT_UPDATE_TEXT, + NOTIFY_LOGIN, + NOTIFY_LOGIN_TEXT, + NOTIFY_SNATCH, + NOTIFY_SNATCH_PROPER, + NOTIFY_SUBTITLE_DOWNLOAD, + notifyStrings, +) +from medusa.logger.adapters.style import BraceAdapter from requests.compat import quote from six.moves.urllib.request import Request, urlopen -from .. 
import app, logger -from ..common import NOTIFY_DOWNLOAD, NOTIFY_GIT_UPDATE, NOTIFY_GIT_UPDATE_TEXT, NOTIFY_LOGIN, NOTIFY_LOGIN_TEXT, NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER, \ - NOTIFY_SUBTITLE_DOWNLOAD, notifyStrings + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): def test_notify(self, cust_id=None, apiKey=None): - return self._notifyFreeMobile('Test', "This is a test notification from Medusa", cust_id, apiKey, force=True) + return self._notifyFreeMobile('Test', 'This is a test notification from Medusa', cust_id, apiKey, force=True) def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None): """ @@ -46,12 +42,12 @@ def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None): if apiKey is None: apiKey = app.FREEMOBILE_APIKEY - logger.log(u"Free Mobile in use with API KEY: " + apiKey, logger.DEBUG) + log.debug(u'Free Mobile in use with API KEY: {0}', apiKey) # build up the URL and parameters msg = msg.strip() - msg_quoted = quote(title.encode('utf-8') + ": " + msg.encode('utf-8')) - URL = "https://smsapi.free-mobile.fr/sendmsg?user=" + cust_id + "&pass=" + apiKey + "&msg=" + msg_quoted + msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8')) + URL = 'https://smsapi.free-mobile.fr/sendmsg?user=' + cust_id + '&pass=' + apiKey + '&msg=' + msg_quoted req = Request(URL) # send the request to Free Mobile @@ -59,29 +55,23 @@ def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None): urlopen(req) except IOError as e: if hasattr(e, 'code'): - if e.code == 400: - message = "Missing parameter(s)." - logger.log(message, logger.ERROR) - return False, message - if e.code == 402: - message = "Too much SMS sent in a short time." - logger.log(message, logger.ERROR) - return False, message - if e.code == 403: - message = "API service isn't enabled in your account or ID / API key is incorrect." - logger.log(message, logger.ERROR) - return False, message - if e.code == 500: - message = "Server error. Please retry in few moment." - logger.log(message, logger.ERROR) + error_message = { + 400: 'Missing parameter(s).', + 402: 'Too much SMS sent in a short time.', + 403: 'API service is not enabled in your account or ID / API key is incorrect.', + 500: 'Server error. Please retry in few moment.', + } + message = error_message.get(e.code) + if message: + log.error(message) return False, message except Exception as e: - message = u"Error while sending SMS: {0}".format(e) - logger.log(message, logger.ERROR) + message = u'Error while sending SMS: {0}'.format(e) + log.error(message) return False, message - message = "Free Mobile SMS successful." - logger.log(message, logger.INFO) + message = 'Free Mobile SMS successful.' 
+ log.info(message) return True, message def notify_snatch(self, ep_name, is_proper): @@ -95,15 +85,15 @@ def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): if app.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notifyFreeMobile(title, ep_name + ": " + lang) + self._notifyFreeMobile(title, ep_name + ': ' + lang) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_FREEMOBILE: update_text = notifyStrings[NOTIFY_GIT_UPDATE_TEXT] title = notifyStrings[NOTIFY_GIT_UPDATE] self._notifyFreeMobile(title, update_text + new_version) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_FREEMOBILE: update_text = notifyStrings[NOTIFY_LOGIN_TEXT] title = notifyStrings[NOTIFY_LOGIN] @@ -121,9 +111,9 @@ def _notifyFreeMobile(self, title, message, cust_id=None, apiKey=None, force=Fal """ if not app.USE_FREEMOBILE and not force: - logger.log(u"Notification for Free Mobile not enabled, skipping this notification", logger.DEBUG) - return False, "Disabled" + log.debug(u'Notification for Free Mobile not enabled, skipping this notification') + return False, 'Disabled' - logger.log(u"Sending a SMS for " + message, logger.DEBUG) + log.debug(u'Sending a SMS for {0}', message) return self._sendFreeMobileSMS(title, message, cust_id, apiKey) diff --git a/medusa/notifiers/growl.py b/medusa/notifiers/growl.py index fa586f0d75..55d134ae2b 100644 --- a/medusa/notifiers/growl.py +++ b/medusa/notifiers/growl.py @@ -1,35 +1,23 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - from __future__ import print_function +import logging import socket from libgrowl import gntp -from .. 
import app, common, logger -from ..helper.exceptions import ex +from medusa import app, common +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): def test_notify(self, host, password): self._sendRegistration(host, password) - return self._sendGrowl("Test Growl", "Testing Growl settings from Medusa", "Test", host, password, + return self._sendGrowl('Test Growl', 'Testing Growl settings from Medusa', 'Test', host, password, force=True) def notify_snatch(self, ep_name, is_proper): @@ -42,14 +30,14 @@ def notify_download(self, ep_name): def notify_subtitle_download(self, ep_name, lang): if app.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD: - self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang) + self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ': ' + lang) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._sendGrowl(title, update_text + new_version) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] self._sendGrowl(title, update_text.format(ipaddress)) @@ -97,7 +85,7 @@ def _send(host, port, data, debug=False): return response - def _sendGrowl(self, title="Medusa Notification", message=None, name=None, host=None, password=None, + def _sendGrowl(self, title='Medusa Notification', message=None, name=None, host=None, password=None, force=False): if not app.USE_GROWL and not force: return False @@ -136,7 +124,10 @@ def _sendGrowl(self, title="Medusa Notification", message=None, name=None, host= for pc in growlHosts: opts['host'] = pc[0] opts['port'] = pc[1] - logger.log(u"GROWL: Sending message '" + message + "' to " + opts['host'] + ":" + str(opts['port']), logger.DEBUG) + log.debug( + u'GROWL: Sending growl to {host}:{port} - {msg!r}', + {'msg': message, 'host': opts['host'], 'port': opts['port']} + ) try: if self._send_growl(opts, message): return True @@ -145,8 +136,11 @@ def _sendGrowl(self, title="Medusa Notification", message=None, name=None, host= return self._send_growl(opts, message) else: return False - except Exception as e: - logger.log(u"GROWL: Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + " - " + ex(e), logger.WARNING) + except Exception as error: + log.warning( + u'GROWL: Unable to send growl to {host}:{port} - {msg!r}', + {'msg': ex(error), 'host': opts['host'], 'port': opts['port']} + ) return False def _sendRegistration(self, host=None, password=None): @@ -188,6 +182,9 @@ def _sendRegistration(self, host=None, password=None): try: return self._send(opts['host'], opts['port'], register.encode(), opts['debug']) - except Exception as e: - logger.log(u"GROWL: Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + " - " + ex(e), logger.WARNING) + except Exception as error: + log.warning( + u'GROWL: Unable to send growl to {host}:{port} - {msg!r}', + {'msg': ex(error), 'host': opts['host'], 'port': opts['port']} + ) return False diff --git a/medusa/notifiers/kodi.py b/medusa/notifiers/kodi.py index 820008028c..9d68dcfb2a 100644 --- a/medusa/notifiers/kodi.py +++ b/medusa/notifiers/kodi.py @@ -1,43 +1,33 @@ # 
coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - import base64 import json +import logging import socket import time +from medusa import app, common +from medusa.helper.encoding import ss +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import quote, unquote, unquote_plus, urlencode from six import text_type from six.moves.http_client import BadStatusLine from six.moves.urllib.error import URLError from six.moves.urllib.request import Request, urlopen -from .. import app, common, logger -from ..helper.encoding import ss -from ..helper.exceptions import ex try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + class Notifier(object): - def _get_kodi_version(self, host, username, password, dest_app="KODI"): + def _get_kodi_version(self, host, username, password, dest_app='KODI'): """Returns KODI JSON-RPC API version (odd # = dev, even # = stable) Sends a request to the KODI host using the JSON-RPC to determine if @@ -68,14 +58,18 @@ def _get_kodi_version(self, host, username, password, dest_app="KODI"): # override socket timeout to reduce delay for this call alone socket.setdefaulttimeout(10) - checkCommand = '{"jsonrpc":"2.0","method":"JSONRPC.Version","id":1}' + checkCommand = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'JSONRPC.Version', + 'id': 1, + }) result = self._send_to_kodi_json(checkCommand, host, username, password, dest_app) # revert back to default socket timeout socket.setdefaulttimeout(app.SOCKET_TIMEOUT) if result: - return result["result"]["version"] + return result['result']['version'] else: # fallback to legacy HTTPAPI method testCommand = {'command': 'Help'} @@ -86,7 +80,7 @@ def _get_kodi_version(self, host, username, password, dest_app="KODI"): else: return False - def _notify_kodi(self, message, title="Medusa", host=None, username=None, password=None, force=False, dest_app="KODI"): # pylint: disable=too-many-arguments + def _notify_kodi(self, message, title='Medusa', host=None, username=None, password=None, force=False, dest_app='KODI'): # pylint: disable=too-many-arguments """Internal wrapper for the notify_snatch and notify_download functions Detects JSON-RPC version then branches the logic for either the JSON-RPC or legacy HTTP API methods. 
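A minimal, standalone sketch of the JSONRPC.Version probe that this file now builds with json.dumps; the host, port and credentials below are hypothetical placeholders, and the notifier's own fallback and error handling are omitted:

import base64
import json

from six.moves.urllib.request import Request, urlopen


def probe_kodi_api_version(host='localhost:8080', username=None, password=None):
    """Ask a Kodi instance for its JSON-RPC API version (even = stable, odd = development)."""
    command = json.dumps({'jsonrpc': '2.0', 'method': 'JSONRPC.Version', 'id': 1})
    request = Request('http://%s/jsonrpc' % host, command.encode('utf-8'))
    request.add_header('Content-type', 'application/json')
    if username and password:
        token = base64.b64encode(('%s:%s' % (username, password)).encode('utf-8'))
        request.add_header('Authorization', 'Basic %s' % token.decode('ascii'))
    response = json.loads(urlopen(request).read().decode('utf-8'))
    # Older builds report a plain integer here; newer ones return a {major, minor, patch} mapping.
    return response['result']['version']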
@@ -115,33 +109,53 @@ def _notify_kodi(self, message, title="Medusa", host=None, username=None, passwo # suppress notifications if the notifier is disabled but the notify options are checked if not app.USE_KODI and not force: - logger.log(u"Notification for %s not enabled, skipping this notification" % dest_app, logger.DEBUG) + log.debug(u'Notification for {app} not enabled, skipping this notification', + {'app': dest_app}) return False result = '' - for curHost in [x.strip() for x in host.split(",") if x.strip()]: - logger.log(u"Sending %s notification to '%s' - %s" % (dest_app, curHost, message), logger.DEBUG) + for curHost in [x.strip() for x in host.split(',') if x.strip()]: + log.debug(u'Sending {app} notification to {host} - {msg}', + {'app': dest_app, 'host': curHost, 'msg': message}) kodiapi = self._get_kodi_version(curHost, username, password, dest_app) if kodiapi: if kodiapi <= 4: - logger.log(u"Detected %s version <= 11, using %s HTTP API" % (dest_app, dest_app), logger.DEBUG) - command = {'command': 'ExecBuiltIn', - 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode( - "utf-8") + ')'} + log.debug(u'Detected {app} version <= 11, using {app} HTTP API', + {'app': dest_app}) + command = { + 'command': 'ExecBuiltIn', + 'parameter': 'Notification({title},{msg})'.format( + title=title.encode('utf-8'), + msg=message.encode('utf-8'), + ) + } notifyResult = self._send_to_kodi(command, curHost, username, password) if notifyResult: result += curHost + ':' + str(notifyResult) else: - logger.log(u"Detected %s version >= 12, using %s JSON API" % (dest_app, dest_app), logger.DEBUG) - command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % ( - title.encode("utf-8"), message.encode("utf-8"), app.LOGO_URL) + log.debug(u'Detected {app} version >= 12, using {app} JSON API', + {'app': dest_app}) + command = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'GUI.ShowNotification', + 'params': { + 'title': title.encode('utf-8'), + 'message': message.encode('utf-8'), + 'image': app.LOGO_URL, + }, + 'id': '1', + }) notifyResult = self._send_to_kodi_json(command, curHost, username, password, dest_app) if notifyResult and notifyResult.get('result'): # pylint: disable=no-member - result += curHost + ':' + notifyResult["result"].decode(app.SYS_ENCODING) + result += curHost + ':' + notifyResult['result'].decode(app.SYS_ENCODING) else: if app.KODI_ALWAYS_ON or force: - logger.log(u"Failed to detect %s version for '%s', check configuration and try again." 
% (dest_app, curHost), logger.WARNING) + log.warning( + u'Failed to detect {app} version for {host},' + u' check configuration and try again.', + {'app': dest_app, 'host': curHost} + ) result += curHost + ':False' return result @@ -160,26 +174,21 @@ def _send_update_library(self, host, showName=None): """ - logger.log(u"Sending request to update library for KODI host: '%s'" % host, logger.DEBUG) + log.debug(u'Sending request to update library for KODI host: {0}', host) kodiapi = self._get_kodi_version(host, app.KODI_USERNAME, app.KODI_PASSWORD) if kodiapi: - if kodiapi <= 4: - # try to update for just the show, if it fails, do full update if enabled - if not self._update_library(host, showName) and app.KODI_UPDATE_FULL: - logger.log(u"Single show update failed, falling back to full update", logger.DEBUG) - return self._update_library(host) - else: - return True + update = self._update_library if kodiapi <= 4 else self._update_library_json + # try to update for just the show, if it fails, do full update if enabled + if not update(host, showName) and app.KODI_UPDATE_FULL: + log.debug(u'Single show update failed, falling back to full update') + return update(host) else: - # try to update for just the show, if it fails, do full update if enabled - if not self._update_library_json(host, showName) and app.KODI_UPDATE_FULL: - logger.log(u"Single show update failed, falling back to full update", logger.DEBUG) - return self._update_library_json(host) - else: - return True + return True elif app.KODI_ALWAYS_ON: - logger.log(u"Failed to detect KODI version for '" + host + "', check configuration and try again.", logger.WARNING) + log.warning(u'Failed to detect KODI version for {host},' + u' check configuration and try again.', + {'host': host}) return False @@ -188,7 +197,7 @@ def _send_update_library(self, host, showName=None): ############################################################################## @staticmethod - def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KODI"): # pylint: disable=too-many-arguments + def _send_to_kodi(command, host=None, username=None, password=None, dest_app='KODI'): # pylint: disable=too-many-arguments """Handles communication to KODI servers via HTTP API Args: @@ -203,13 +212,12 @@ def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KO """ # fill in omitted parameters - if not username: - username = app.KODI_USERNAME - if not password: - password = app.KODI_PASSWORD + username = username or app.KODI_USERNAME + password = password or app.KODI_PASSWORD if not host: - logger.log(u'No %s host passed, aborting update' % dest_app, logger.WARNING) + log.warning(u'No {app} host passed, aborting update', + {'app': dest_app}) return False for key in command: @@ -217,7 +225,8 @@ def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KO command[key] = command[key].encode('utf-8') enc_command = urlencode(command) - logger.log(u"%s encoded API command: %r" % (dest_app, enc_command), logger.DEBUG) + log.debug(u'{app} encoded API command: {cmd!r}', + {'app': dest_app, 'cmd': enc_command}) # url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command) # maybe need for old plex? 
url = 'http://%s/kodiCmds/kodiHttp/?%s' % (host, enc_command) @@ -226,26 +235,26 @@ def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KO # if we have a password, use authentication if password: base64string = base64.encodestring('%s:%s' % (username, password))[:-1] - authheader = "Basic %s" % base64string - req.add_header("Authorization", authheader) - logger.log(u"Contacting %s (with auth header) via url: %s" % (dest_app, ss(url)), logger.DEBUG) + authheader = 'Basic %s' % base64string + req.add_header('Authorization', authheader) + log.debug(u'Contacting {0} (with auth header) via url: {1}', dest_app, ss(url)) else: - logger.log(u"Contacting %s via url: %s" % (dest_app, ss(url)), logger.DEBUG) + log.debug(u'Contacting {0} via url: {1}', dest_app, ss(url)) try: response = urlopen(req) except (BadStatusLine, URLError) as e: - logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG) + log.debug(u'Unable to contact {0} HTTP at {1!r} : {2!r}', dest_app, url, ex(e)) return False result = response.read().decode(app.SYS_ENCODING) response.close() - logger.log(u"%s HTTP response: %s" % (dest_app, result.replace('\n', '')), logger.DEBUG) + log.debug(u'{0} HTTP response: {1}', dest_app, result.replace('\n', '')) return result except Exception as e: - logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG) + log.debug(u'Unable to contact {0} HTTP at {1!r} : {2!r}', dest_app, url, ex(e)) return False def _update_library(self, host=None, showName=None): # pylint: disable=too-many-locals, too-many-return-statements @@ -264,22 +273,27 @@ def _update_library(self, host=None, showName=None): # pylint: disable=too-many """ if not host: - logger.log(u'No KODI host passed, aborting update', logger.WARNING) + log.warning(u'No KODI host passed, aborting update') return False - logger.log(u"Updating KODI library via HTTP method for host: " + host, logger.DEBUG) + log.debug(u'Updating KODI library via HTTP method for host: {0}', host) # if we're doing per-show if showName: - logger.log(u"Updating library in KODI via HTTP method for show " + showName, logger.DEBUG) + log.debug(u'Updating library in KODI via HTTP method for show {0}', showName) - pathSql = 'select path.strPath from path, tvshow, tvshowlinkpath where ' \ - 'tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow ' \ - 'and tvshowlinkpath.idPath = path.idPath' % showName + pathSql = ( + "SELECT path.strPath " + "FROM path, tvshow, tvshowlinkpath " + "WHERE tvshow.c00 = '%s'" + " AND tvshowlinkpath.idShow = tvshow.idShow" + " AND tvshowlinkpath.idPath = path.idPath" % showName + ) # use this to get xml back for the path lookups xmlCommand = { - 'command': 'SetResponseFormat(webheader;false;webfooter;false;header;;footer;;opentag;;closetag;;closefinaltag;false)'} + 'command': 'SetResponseFormat(webheader;false;webfooter;false;header;;footer;;opentag;;closetag;;closefinaltag;false)' + } # sql used to grab path(s) sqlCommand = {'command': 'QueryVideoDatabase(%s)' % pathSql} # set output back to default @@ -294,42 +308,42 @@ def _update_library(self, host=None, showName=None): # pylint: disable=too-many request = self._send_to_kodi(resetCommand, host) if not sqlXML: - logger.log(u"Invalid response for " + showName + " on " + host, logger.DEBUG) + log.debug(u'Invalid response for {0} on {1}', showName, host) return False encSqlXML = quote(sqlXML, ':\\/<>') try: et = etree.fromstring(encSqlXML) except SyntaxError as e: - logger.log(u"Unable to parse XML 
returned from KODI: " + ex(e), logger.ERROR) + log.error(u'Unable to parse XML returned from KODI: {0}', ex(e)) return False paths = et.findall('.//field') if not paths: - logger.log(u"No valid paths found for " + showName + " on " + host, logger.DEBUG) + log.debug(u'No valid paths found for {0} on {1}', showName, host) return False for path in paths: # we do not need it double-encoded, gawd this is dumb unEncPath = unquote(path.text).decode(app.SYS_ENCODING) - logger.log(u"KODI Updating " + showName + " on " + host + " at " + unEncPath, logger.DEBUG) + log.debug(u'KODI Updating {0} on {1} at {2}', showName, host, unEncPath) updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video, %s)' % unEncPath} request = self._send_to_kodi(updateCommand, host) if not request: - logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath, logger.WARNING) + log.warning(u'Update of show directory failed on {0} on {1} at {2}', showName, host, unEncPath) return False # sleep for a few seconds just to be sure kodi has a chance to finish each directory if len(paths) > 1: time.sleep(5) # do a full update if requested else: - logger.log(u"Doing Full Library KODI update on host: " + host, logger.DEBUG) + log.debug(u'Doing Full Library KODI update on host: {0}', host) updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video)'} request = self._send_to_kodi(updateCommand, host) if not request: - logger.log(u"KODI Full Library update failed on: " + host, logger.WARNING) + log.warning(u'KODI Full Library update failed on: {0}', host) return False return True @@ -339,7 +353,7 @@ def _update_library(self, host=None, showName=None): # pylint: disable=too-many ############################################################################## @staticmethod - def _send_to_kodi_json(command, host=None, username=None, password=None, dest_app="KODI"): + def _send_to_kodi_json(command, host=None, username=None, password=None, dest_app='KODI'): """Handles communication to KODI servers via JSONRPC Args: @@ -360,45 +374,45 @@ def _send_to_kodi_json(command, host=None, username=None, password=None, dest_ap password = app.KODI_PASSWORD if not host: - logger.log(u'No %s host passed, aborting update' % dest_app, logger.WARNING) + log.warning(u'No {0} host passed, aborting update', dest_app) return False command = command.encode('utf-8') - logger.log(u"%s JSON command: %s" % (dest_app, command), logger.DEBUG) + log.debug(u'{0} JSON command: {1}', dest_app, command) url = 'http://%s/jsonrpc' % host try: req = Request(url, command) - req.add_header("Content-type", "application/json") + req.add_header('Content-type', 'application/json') # if we have a password, use authentication if password: base64string = base64.encodestring('%s:%s' % (username, password))[:-1] - authheader = "Basic %s" % base64string - req.add_header("Authorization", authheader) - logger.log(u"Contacting %s (with auth header) via url: %s" % (dest_app, ss(url)), logger.DEBUG) + authheader = 'Basic %s' % base64string + req.add_header('Authorization', authheader) + log.debug(u'Contacting {0} (with auth header) via url: {1}', dest_app, ss(url)) else: - logger.log(u"Contacting %s via url: %s" % (dest_app, ss(url)), logger.DEBUG) + log.debug(u'Contacting {0} via url: {1}', dest_app, ss(url)) try: response = urlopen(req) except (BadStatusLine, URLError) as e: if app.KODI_ALWAYS_ON: - logger.log(u"Error while trying to retrieve %s API version for %s: %r" % (dest_app, host, ex(e)), 
logger.WARNING) + log.warning(u'Error while trying to retrieve {0} API version for {1}: {2!r}', dest_app, host, ex(e)) return False # parse the json result try: result = json.load(response) response.close() - logger.log(u"%s JSON response: %s" % (dest_app, result), logger.DEBUG) + log.debug(u'{0} JSON response: {1}', dest_app, result) return result # need to return response for parsing except ValueError as e: - logger.log(u"Unable to decode JSON: " + str(response.read()), logger.WARNING) + log.warning(u'Unable to decode JSON: {0}', response.read()) return False except IOError as e: if app.KODI_ALWAYS_ON: - logger.log(u"Warning: Couldn't contact %s JSON API at %s: %r" % (dest_app, ss(url), ex(e)), logger.WARNING) + log.warning(u'Warning: Unable to contact {0} JSON API at {1}: {2!r}', dest_app, ss(url), ex(e)) return False def clean_library(self): @@ -407,12 +421,19 @@ def clean_library(self): return True clean_library = True for host in [x.strip() for x in app.KODI_HOST.split(',')]: - logger.log(u'Cleaning KODI library via JSON method for host: {0}'.format(host), logger.INFO) - update_command = '{"jsonrpc":"2.0","method":"VideoLibrary.Clean","params": {"showdialogs": false},"id":1}' + log.info(u'Cleaning KODI library via JSON method for host: {0}', host) + update_command = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'VideoLibrary.Clean', + 'params': { + 'showdialogs': False, + }, + 'id': 1, + }) request = self._send_to_kodi_json(update_command, host) if not request: if app.KODI_ALWAYS_ON: - logger.log(u'KODI library clean failed for host: {0}'.format(host), logger.WARNING) + log.warning(u'KODI library clean failed for host: {0}', host) clean_library = False if app.KODI_UPDATE_ONLYFIRST: break @@ -423,7 +444,7 @@ def clean_library(self): for r in request: if 'error' in r: if app.KODI_ALWAYS_ON: - logger.log(u'Error while attempting to clean library for host: {0}'.format(host), logger.WARNING) + log.warning(u'Error while attempting to clean library for host: {0}', host) clean_library = False if app.KODI_UPDATE_ONLYFIRST: break @@ -447,10 +468,10 @@ def _update_library_json(self, host=None, showName=None): # pylint: disable=too """ if not host: - logger.log(u'No KODI host passed, aborting update', logger.WARNING) + log.warning(u'No KODI host passed, aborting update') return False - logger.log(u"Updating KODI library via JSON method for host: " + host, logger.INFO) + log.info(u'Updating KODI library via JSON method for host: {0}', host) # if we're doing per-show if showName: @@ -458,33 +479,49 @@ def _update_library_json(self, host=None, showName=None): # pylint: disable=too tvshowid = -1 path = '' - logger.log(u"Updating library in KODI via JSON method for show " + showName, logger.DEBUG) + log.debug(u'Updating library in KODI via JSON method for show {0}', showName) # let's try letting kodi filter the shows - showsCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","params":{"filter":{"field":"title","operator":"is","value":"%s"},"properties":["title"]},"id":"Medusa"}' + showsCommand = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'VideoLibrary.GetTVShows', + 'params': { + 'filter': { + 'field': 'title', + 'operator': 'is', + 'value': showName, + }, + 'properties': ['title'], + }, + 'id': 'Medusa', + }) # get tvshowid by showName - showsResponse = self._send_to_kodi_json(showsCommand % showName, host) + showsResponse = self._send_to_kodi_json(showsCommand, host) - if showsResponse and "result" in showsResponse and "tvshows" in showsResponse["result"]: - shows = 
showsResponse["result"]["tvshows"] + if showsResponse and 'result' in showsResponse and 'tvshows' in showsResponse['result']: + shows = showsResponse['result']['tvshows'] else: # fall back to retrieving the entire show list - showsCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","id":1}' + showsCommand = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'VideoLibrary.GetTVShows', + 'id': 1, + }) showsResponse = self._send_to_kodi_json(showsCommand, host) - if showsResponse and "result" in showsResponse and "tvshows" in showsResponse["result"]: - shows = showsResponse["result"]["tvshows"] + if showsResponse and 'result' in showsResponse and 'tvshows' in showsResponse['result']: + shows = showsResponse['result']['tvshows'] else: - logger.log(u"KODI: No tvshows in KODI TV show list", logger.DEBUG) + log.debug(u'KODI: No tvshows in KODI TV show list') return False for show in shows: - if ("label" in show and show["label"] == showName) or ("title" in show and show["title"] == showName): - tvshowid = show["tvshowid"] + if ('label' in show and show['label'] == showName) or ('title' in show and show['title'] == showName): + tvshowid = show['tvshowid'] # set the path is we have it already - if "file" in show: - path = show["file"] + if 'file' in show: + path = show['file'] break @@ -493,43 +530,62 @@ def _update_library_json(self, host=None, showName=None): # pylint: disable=too # we didn't find the show (exact match), thus revert to just doing a full update if enabled if tvshowid == -1: - logger.log(u'Exact show name not matched in KODI TV show list', logger.DEBUG) + log.debug(u'Exact show name not matched in KODI TV show list') return False # lookup tv-show path if we don't already know it if not path: - pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % tvshowid + pathCommand = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'VideoLibrary.GetTVShowDetails', + 'params': { + 'tvshowid': tvshowid, + 'properties': ['file'], + }, + 'id': 1, + }) pathResponse = self._send_to_kodi_json(pathCommand, host) - path = pathResponse["result"]["tvshowdetails"]["file"] + path = pathResponse['result']['tvshowdetails']['file'] - logger.log(u"Received Show: " + showName + " with ID: " + str(tvshowid) + " Path: " + path, logger.DEBUG) + log.debug(u'Received Show: {0} with ID: {1} Path: {2}', showName, tvshowid, path) if not path: - logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host, logger.WARNING) + log.warning(u'No valid path found for {0} with ID: {1} on {2}', showName, tvshowid, host) return False - logger.log(u"KODI Updating " + showName + " on " + host + " at " + path, logger.DEBUG) - updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (json.dumps(path)) + log.debug(u'KODI Updating {0} on {1} at {2}', showName, host, path) + updateCommand = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'VideoLibrary.Scan', + 'params': { + 'directory': path, + }, + 'id': 1, + }) request = self._send_to_kodi_json(updateCommand, host) if not request: - logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path, logger.WARNING) + log.warning(u'Update of show directory failed on {0} on {1} at {2}', showName, host, path) return False # catch if there was an error in the returned request for r in request: if 'error' in r: - logger.log(u"Error while attempting to update show directory for " + showName + " on " + host + " 
at " + path, logger.WARNING) + log.warning(u'Error while attempting to update show directory for {0} on {1} at {2} ', showName, host, path) return False # do a full update if requested else: - logger.log(u"Doing Full Library KODI update on host: " + host, logger.DEBUG) - updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}' + log.debug(u'Doing Full Library KODI update on host: {0}', host) + updateCommand = json.dumps({ + 'jsonrpc': '2.0', + 'method': 'VideoLibrary.Scan', + 'id': 1, + }) request = self._send_to_kodi_json(updateCommand, host) if not request: - logger.log(u"KODI Full Library update failed on: " + host, logger.WARNING) + log.warning(u'KODI Full Library update failed on: {0}', host) return False return True @@ -548,22 +604,22 @@ def notify_download(self, ep_name): def notify_subtitle_download(self, ep_name, lang): if app.KODI_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify_kodi(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) + self._notify_kodi(ep_name + ': ' + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_KODI: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._notify_kodi(update_text + new_version, title) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_KODI: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] self._notify_kodi(update_text.format(ipaddress), title) def test_notify(self, host, username, password): - return self._notify_kodi("Testing KODI notifications from Medusa", "Test Notification", host, username, password, force=True) + return self._notify_kodi('Testing KODI notifications from Medusa', 'Test Notification', host, username, password, force=True) def update_library(self, showName=None): """Public wrapper for the update library functions to branch the logic for JSON-RPC or legacy HTTP API @@ -583,19 +639,19 @@ def update_library(self, showName=None): if app.USE_KODI and app.KODI_UPDATE_LIBRARY: if not app.KODI_HOST: - logger.log(u"No KODI hosts specified, check your settings", logger.DEBUG) + log.debug(u'No KODI hosts specified, check your settings') return False # either update each host, or only attempt to update until one successful result result = 0 - for host in [x.strip() for x in app.KODI_HOST.split(",")]: + for host in [x.strip() for x in app.KODI_HOST.split(',')]: if self._send_update_library(host, showName): if app.KODI_UPDATE_ONLYFIRST: - logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.", logger.DEBUG) + log.debug(u'Successfully updated {0}, stopped sending update library commands.', host) return True else: if app.KODI_ALWAYS_ON: - logger.log(u"Failed to detect KODI version for '" + host + "', check configuration and try again.", logger.WARNING) + log.warning(u'Failed to detect KODI version for {0}, check configuration and try again.', host) result += 1 # needed for the 'update kodi' submenu command diff --git a/medusa/notifiers/libnotify.py b/medusa/notifiers/libnotify.py index 3456f7ebdd..d3148793f8 100644 --- a/medusa/notifiers/libnotify.py +++ b/medusa/notifiers/libnotify.py @@ -1,26 +1,14 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. 
-# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - import cgi +import logging import os -from .. import app, common, logger +from medusa import app, common +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) def diagnose(): @@ -68,15 +56,15 @@ def init_notify(self): try: from gi.repository import Notify except ImportError: - logger.log(u"Unable to import Notify from gi.repository. libnotify notifications won't work.", logger.ERROR) + log.error(u"Unable to import Notify from gi.repository. libnotify notifications won't work.") return False try: from gi.repository import GObject except ImportError: - logger.log(u"Unable to import GObject from gi.repository. We can't catch a GError in display.", logger.ERROR) + log.error(u"Unable to import GObject from gi.repository. We can't catch a GError in display.") return False if not Notify.init('Medusa'): - logger.log(u"Initialization of Notify failed. libnotify notifications won't work.", logger.ERROR) + log.error(u"Initialization of Notify failed. libnotify notifications won't work.") return False self.Notify = Notify self.gobject = GObject diff --git a/medusa/notifiers/nma.py b/medusa/notifiers/nma.py index ac79e183d0..6d20fbabe8 100644 --- a/medusa/notifiers/nma.py +++ b/medusa/notifiers/nma.py @@ -1,12 +1,19 @@ # coding=utf-8 +import logging + +from medusa import app, common +from medusa.logger.adapters.style import BraceAdapter + from pynma import pynma -from .. 
import app, common, logger + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): def test_notify(self, nma_api, nma_priority): - return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Medusa", + return self._sendNMA(nma_api, nma_priority, event='Test', message='Testing NMA settings from Medusa', force=True) def notify_snatch(self, ep_name, is_proper): @@ -22,15 +29,15 @@ def notify_download(self, ep_name): def notify_subtitle_download(self, ep_name, lang): if app.NMA_NOTIFY_ONSUBTITLEDOWNLOAD: self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], - message=ep_name + ": " + lang) + message=ep_name + ': ' + lang) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_NMA: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._sendNMA(nma_api=None, nma_priority=None, event=title, message=update_text + new_version) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_NMA: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] @@ -58,12 +65,13 @@ def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, fo if len(keys) > 1: batch = True - logger.log(u"NMA: Sending notice with details: event=\"%s\", message=\"%s\", priority=%s, batch=%s" % (event, message, nma_priority, batch), logger.DEBUG) + log.debug(u'NMA: Sending notice with details: event="{0}, message="{1}", priority={2}, batch={3}', + event, message, nma_priority, batch) response = p.push(application=title, event=event, description=message, priority=nma_priority, batch_mode=batch) if not response[nma_api][u'code'] == u'200': - logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR) + log.error(u'Could not send notification to NotifyMyAndroid') return False else: - logger.log(u"NMA: Notification sent to NotifyMyAndroid", logger.INFO) + log.info(u'NMA: Notification sent to NotifyMyAndroid') return True diff --git a/medusa/notifiers/nmj.py b/medusa/notifiers/nmj.py index 1954d1ed02..4f0b50ef36 100644 --- a/medusa/notifiers/nmj.py +++ b/medusa/notifiers/nmj.py @@ -1,35 +1,24 @@ # coding=utf-8 -# Author: Nico Berlee http://nico.berlee.nl/ -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - +import logging import re import telnetlib +from medusa import app +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urlencode from six.moves.urllib.request import Request, urlopen -from .. 
import app, logger -from ..helper.exceptions import ex try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class Notifier(object): def notify_settings(self, host): @@ -45,40 +34,39 @@ def notify_settings(self, host): try: terminal = telnetlib.Telnet(host) except Exception: - logger.log(u"Warning: unable to get a telnet session to %s" % host, logger.WARNING) + log.warning(u'Warning: unable to get a telnet session to {0}', host) return False # tell the terminal to output the necessary info to the screen so we can search it later - logger.log(u"Connected to %s via telnet" % host, logger.DEBUG) - terminal.read_until("sh-3.00# ") - terminal.write("cat /tmp/source\n") - terminal.write("cat /tmp/netshare\n") - terminal.write("exit\n") + log.debug(u'Connected to {0} via telnet', host) + terminal.read_until('sh-3.00# ') + terminal.write('cat /tmp/source\n') + terminal.write('cat /tmp/netshare\n') + terminal.write('exit\n') tnoutput = terminal.read_all() - match = re.search(r"(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)", tnoutput) + match = re.search(r'(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)', tnoutput) # if we found the database in the terminal output then save that database to the config if match: database = match.group(1) device = match.group(2) - logger.log(u"Found NMJ database %s on device %s" % (database, device), logger.DEBUG) + log.debug(u'Found NMJ database {0} on device {1}', database, device) app.NMJ_DATABASE = database else: - logger.log(u"Could not get current NMJ database on %s, NMJ is probably not running!" % host, logger.WARNING) + log.warning(u'Could not get current NMJ database on {0}, NMJ is probably not running!', host) return False # if the device is a remote host then try to parse the mounting URL and save it to the config - if device.startswith("NETWORK_SHARE/"): - match = re.search(".*(?=\r\n?%s)" % (re.escape(device[14:])), tnoutput) + if device.startswith('NETWORK_SHARE/'): + match = re.search('.*(?=\r\n?%s)' % (re.escape(device[14:])), tnoutput) if match: - mount = match.group().replace("127.0.0.1", host) - logger.log(u"Found mounting url on the Popcorn Hour in configuration: %s" % mount, logger.DEBUG) + mount = match.group().replace('127.0.0.1', host) + log.debug(u'Found mounting url on the Popcorn Hour in configuration: {0}', mount) app.NMJ_MOUNT = mount else: - logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", - logger.WARNING) + log.warning(u'Detected a network share on the Popcorn Hour, but could not get the mounting url') return False return True @@ -99,7 +87,7 @@ def notify_git_update(self, new_version): return False # Not implemented, no reason to start scanner. 
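For reference, the scanner_start request that _sendNMJ assembles further down in this file boils down to the following sketch; the example host and database path in the usage comment are invented, not values taken from the patch:

from requests.compat import urlencode


def build_nmj_scan_url(host, database):
    """Build the Popcorn Hour metadata_database URL that starts a background NMJ scan."""
    params = urlencode({
        'arg0': 'scanner_start',  # scanner command
        'arg1': database,         # NMJ database path on the device
        'arg2': 'background',
        'arg3': '',
    })
    return 'http://%(host)s:8008/metadata_database?%(params)s' % {'host': host, 'params': params}


# Hypothetical usage:
# build_nmj_scan_url('192.168.1.50', '/opt/sybhttpd/localhost.drives/HARD_DISK/nmj_database/media.db')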
- def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): return False def test_notify(self, host, database, mount): @@ -120,59 +108,59 @@ def _sendNMJ(self, host, database, mount=None): if mount: try: req = Request(mount) - logger.log(u"Try to mount network drive via url: %s" % mount, logger.DEBUG) + log.debug(u'Try to mount network drive via url: {0}', mount) handle = urlopen(req) - except IOError as e: - if hasattr(e, 'reason'): - logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING) - elif hasattr(e, 'code'): - logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING) + except IOError as error: + if hasattr(error, 'reason'): + log.warning(u'NMJ: Could not contact Popcorn Hour on host {0}: {1}', host, error.reason) + elif hasattr(error, 'code'): + log.warning(u'NMJ: Problem with Popcorn Hour on host {0}: {1}', host, error.code) return False - except Exception as e: - logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR) + except Exception as error: + log.error(u'NMJ: Unknown exception: {0}', ex(error)) return False # build up the request URL and parameters - UPDATE_URL = "http://%(host)s:8008/metadata_database?%(params)s" + UPDATE_URL = 'http://%(host)s:8008/metadata_database?%(params)s' params = { - "arg0": "scanner_start", - "arg1": database, - "arg2": "background", - "arg3": "" + 'arg0': 'scanner_start', + 'arg1': database, + 'arg2': 'background', + 'arg3': '' } params = urlencode(params) - updateUrl = UPDATE_URL % {"host": host, "params": params} + updateUrl = UPDATE_URL % {'host': host, 'params': params} # send the request to the server try: req = Request(updateUrl) - logger.log(u"Sending NMJ scan update command via url: %s" % updateUrl, logger.DEBUG) + log.debug(u'Sending NMJ scan update command via url: {0}', updateUrl) handle = urlopen(req) response = handle.read() - except IOError as e: - if hasattr(e, 'reason'): - logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING) - elif hasattr(e, 'code'): - logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING) + except IOError as error: + if hasattr(error, 'reason'): + log.warning(u'NMJ: Could not contact Popcorn Hour on host {0}: {1}', host, error.reason) + elif hasattr(error, 'code'): + log.warning(u'NMJ: Problem with Popcorn Hour on host {0}: {1}', host, error.code) return False - except Exception as e: - logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR) + except Exception as error: + log.error(u'NMJ: Unknown exception: {0}', ex(error)) return False # try to parse the resulting XML try: et = etree.fromstring(response) - result = et.findtext("returnValue") - except SyntaxError as e: - logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % e, logger.ERROR) + result = et.findtext('returnValue') + except SyntaxError as error: + log.error(u'Unable to parse XML returned from the Popcorn Hour: {0}', error) return False # if the result was a number then consider that an error if int(result) > 0: - logger.log(u"Popcorn Hour returned an error code: %s" % result, logger.ERROR) + log.error(u'Popcorn Hour returned an error code: {0!r}', result) return False else: - logger.log(u"NMJ started background scan", logger.INFO) + log.info(u'NMJ started background scan') return True def _notifyNMJ(self, host=None, database=None, mount=None, force=False): @@ -185,7 +173,7 @@ def _notifyNMJ(self, host=None, database=None, mount=None, 
force=False): force: If True then the notification will be sent even if NMJ is disabled in the config """ if not app.USE_NMJ and not force: - logger.log(u"Notification for NMJ scan update not enabled, skipping this notification", logger.DEBUG) + log.debug(u'Notification for NMJ scan update not enabled, skipping this notification') return False # fill in omitted parameters @@ -196,6 +184,6 @@ def _notifyNMJ(self, host=None, database=None, mount=None, force=False): if not mount: mount = app.NMJ_MOUNT - logger.log(u"Sending scan command for NMJ ", logger.DEBUG) + log.debug(u'Sending scan command for NMJ ') return self._sendNMJ(host, database, mount) diff --git a/medusa/notifiers/nmjv2.py b/medusa/notifiers/nmjv2.py index 4f0c89ed58..55d4f9f31e 100644 --- a/medusa/notifiers/nmjv2.py +++ b/medusa/notifiers/nmjv2.py @@ -1,35 +1,22 @@ # coding=utf-8 -# Author: Jasper Lanting -# Based on nmj.py by Nico Berlee: http://nico.berlee.nl/ -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - - +import logging import time from xml.dom.minidom import parseString +from medusa import app +from medusa.logger.adapters.style import BraceAdapter + from six.moves.urllib.request import Request, urlopen -from .. import app, logger try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class Notifier(object): def notify_snatch(self, ep_name, is_proper): # pylint: disable=unused-argument @@ -46,7 +33,7 @@ def notify_git_update(self, new_version): # pylint: disable=unused-argument return False # Not implemented, no reason to start scanner. 
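Each converted module in this patch sets up its logger the same way: log = BraceAdapter(logging.getLogger(__name__)) followed by log.logger.addHandler(logging.NullHandler()), where BraceAdapter is Medusa's own adapter from medusa.logger.adapters.style. The stand-in below only illustrates the calling convention those log.debug()/log.warning() lines rely on, positional or single-dict arguments interpolated lazily with str.format(); it is a sketch under that assumption, not Medusa's actual implementation:

import logging


class BraceAdapterSketch(object):
    """Illustrative stand-in for a '{}'-style logging adapter."""

    def __init__(self, logger):
        self.logger = logger

    def _emit(self, level, msg, args):
        if self.logger.isEnabledFor(level):
            # A single dict argument acts as a mapping, mirroring calls such as
            # log.debug('Update notification sent to {recipient}', {'recipient': to}).
            if len(args) == 1 and isinstance(args[0], dict):
                self.logger.log(level, msg.format(**args[0]))
            else:
                self.logger.log(level, msg.format(*args))

    def debug(self, msg, *args):
        self._emit(logging.DEBUG, msg, args)

    def info(self, msg, *args):
        self._emit(logging.INFO, msg, args)

    def warning(self, msg, *args):
        self._emit(logging.WARNING, msg, args)

    def error(self, msg, *args):
        self._emit(logging.ERROR, msg, args)


# Hypothetical usage mirroring the patch:
# log = BraceAdapterSketch(logging.getLogger(__name__))
# log.logger.addHandler(logging.NullHandler())
# log.debug('Contacting {0} via url: {1}', 'KODI', 'http://localhost:8080/jsonrpc')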
-    def notify_login(self, ipaddress=""):  # pylint: disable=unused-argument
+    def notify_login(self, ipaddress=''):  # pylint: disable=unused-argument
         return False
     def test_notify(self, host):
@@ -63,7 +50,7 @@ def notify_settings(self, host, dbloc, instance):
             Returns: True if the settings were retrieved successfully, False otherwise
         """
         try:
-            url_loc = "http://{}:8008/file_operation?arg0=list_user_storage_file&arg1=&arg2={}&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false".format(host, instance)
+            url_loc = 'http://{}:8008/file_operation?arg0=list_user_storage_file&arg1=&arg2={}&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false'.format(host, instance)
             req = Request(url_loc)
             handle1 = urlopen(req)
             response1 = handle1.read()
@@ -72,27 +59,27 @@ def notify_settings(self, host, dbloc, instance):
             for node in xml.getElementsByTagName('path'):
                 xmlTag = node.toxml()
                 xmlData = xmlTag.replace('<path>', '').replace('</path>', '').replace('[=]', '')
-                url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1=" + xmlData
+                url_db = 'http://' + host + ':8008/metadata_database?arg0=check_database&arg1=' + xmlData
                 reqdb = Request(url_db)
                 handledb = urlopen(reqdb)
                 responsedb = handledb.read()
                 xmldb = parseString(responsedb)
                 returnvalue = xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>', '').replace(
                     '</returnValue>', '')
-                if returnvalue == "0":
+                if returnvalue == '0':
                     DB_path = xmldb.getElementsByTagName('database_path')[0].toxml().replace(
                         '<database_path>', '').replace('</database_path>', '').replace('[=]', '')
-                    if dbloc == "local" and DB_path.find("localhost") > -1:
+                    if dbloc == 'local' and DB_path.find('localhost') > -1:
                         app.NMJv2_HOST = host
                         app.NMJv2_DATABASE = DB_path
                         return True
-                    if dbloc == "network" and DB_path.find("://") > -1:
+                    if dbloc == 'network' and DB_path.find('://') > -1:
                         app.NMJv2_HOST = host
                         app.NMJv2_DATABASE = DB_path
                         return True
         except IOError as e:
-            logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e), logger.WARNING)
+            log.warning(u'Warning: Unable to contact popcorn hour on host {0}: {1}', host, e)
             return False
         return False
@@ -109,10 +96,10 @@ def _sendNMJ(self, host):
         # if a host is provided then attempt to open a handle to that URL
         try:
-            url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1=" + app.NMJv2_DATABASE + "&arg2=&arg3=update_all"
-            logger.log(u"NMJ scan update command sent to host: %s" % host, logger.DEBUG)
-            url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1=" + app.NMJv2_DATABASE + "&arg2=background&arg3="
-            logger.log(u"Try to mount network drive via url: %s" % host, logger.DEBUG)
+            url_scandir = 'http://' + host + ':8008/metadata_database?arg0=update_scandir&arg1=' + app.NMJv2_DATABASE + '&arg2=&arg3=update_all'
+            log.debug(u'NMJ scan update command sent to host: {0}', host)
+            url_updatedb = 'http://' + host + ':8008/metadata_database?arg0=scanner_start&arg1=' + app.NMJv2_DATABASE + '&arg2=background&arg3='
+            log.debug(u'Try to mount network drive via url: {0}', host)
             prereq = Request(url_scandir)
             req = Request(url_updatedb)
             handle1 = urlopen(prereq)
@@ -120,42 +107,42 @@ def _sendNMJ(self, host):
             time.sleep(300.0 / 1000.0)
             handle2 = urlopen(req)
             response2 = handle2.read()
-        except IOError as e:
-            logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e), logger.WARNING)
+        except IOError as error:
+            log.warning(u'Warning: Unable to contact popcorn hour on host {0}: {1}', host, error)
             return False
         try:
et = etree.fromstring(response1) - result1 = et.findtext("returnValue") - except SyntaxError as e: - logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % e, logger.ERROR) + result1 = et.findtext('returnValue') + except SyntaxError as error: + log.error(u'Unable to parse XML returned from the Popcorn Hour: update_scandir, {0}', error) return False try: et = etree.fromstring(response2) - result2 = et.findtext("returnValue") - except SyntaxError as e: - logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % e, logger.ERROR) + result2 = et.findtext('returnValue') + except SyntaxError as error: + log.error(u'Unable to parse XML returned from the Popcorn Hour: scanner_start, {0}', error) return False # if the result was a number then consider that an error - error_codes = ["8", "11", "22", "49", "50", "51", "60"] - error_messages = ["Invalid parameter(s)/argument(s)", - "Invalid database path", - "Insufficient size", - "Database write error", - "Database read error", - "Open fifo pipe failed", - "Read only file system"] + error_codes = ['8', '11', '22', '49', '50', '51', '60'] + error_messages = ['Invalid parameter(s)/argument(s)', + 'Invalid database path', + 'Insufficient size', + 'Database write error', + 'Database read error', + 'Open fifo pipe failed', + 'Read only file system'] if int(result1) > 0: index = error_codes.index(result1) - logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]), logger.ERROR) + log.error(u'Popcorn Hour returned an error: {0}', error_messages[index]) return False else: if int(result2) > 0: index = error_codes.index(result2) - logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]), logger.ERROR) + log.error(u'Popcorn Hour returned an error: {0}', error_messages[index]) return False else: - logger.log(u"NMJv2 started background scan", logger.INFO) + log.info(u'NMJv2 started background scan') return True def _notifyNMJ(self, host=None, force=False): @@ -168,13 +155,13 @@ def _notifyNMJ(self, host=None, force=False): force: If True then the notification will be sent even if NMJ is disabled in the config """ if not app.USE_NMJv2 and not force: - logger.log(u"Notification for NMJ scan update not enabled, skipping this notification", logger.DEBUG) + log.debug(u'Notification for NMJ scan update not enabled, skipping this notification') return False # fill in omitted parameters if not host: host = app.NMJv2_HOST - logger.log(u"Sending scan command for NMJ ", logger.DEBUG) + log.debug(u'Sending scan command for NMJ') return self._sendNMJ(host) diff --git a/medusa/notifiers/plex.py b/medusa/notifiers/plex.py index b0e775bc61..e6143792ce 100644 --- a/medusa/notifiers/plex.py +++ b/medusa/notifiers/plex.py @@ -1,35 +1,23 @@ # coding=utf-8 -# Author: Dustyn Gibson - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . 
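# The _sendNMJ hunk above maps Popcorn Hour <returnValue> codes to messages
# through two parallel lists indexed with list.index(). Below is a hedged,
# dict-based sketch of the same check; it assumes the module-level BraceAdapter
# logger used throughout this patch and tolerates unknown codes.
import logging

from medusa.logger.adapters.style import BraceAdapter

log = BraceAdapter(logging.getLogger(__name__))

NMJV2_ERRORS = {
    '8': 'Invalid parameter(s)/argument(s)',
    '11': 'Invalid database path',
    '22': 'Insufficient size',
    '49': 'Database write error',
    '50': 'Database read error',
    '51': 'Open fifo pipe failed',
    '60': 'Read only file system',
}


def check_return_value(result):
    """Return True when a scanner call succeeded, logging any known error."""
    if int(result) > 0:
        message = NMJV2_ERRORS.get(result, 'Unknown error code: {0}'.format(result))
        log.error(u'Popcorn Hour returned an error: {0}', message)
        return False
    return True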
- +import logging import re +from medusa import app, common +from medusa.helper.exceptions import ex +from medusa.helpers import get_url, make_session +from medusa.logger.adapters.style import BraceAdapter + from six import iteritems -from .. import app, common, logger -from ..helper.exceptions import ex -from ..helpers import get_url, make_session try: import xml.etree.cElementTree as etree except ImportError: import xml.etree.ElementTree as etree +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class Notifier(object): def __init__(self): @@ -67,7 +55,7 @@ def _notify_pht(message, title='Medusa', host=None, username=None, password=None username = username or app.PLEX_CLIENT_USERNAME password = password or app.PLEX_CLIENT_PASSWORD - return kodi_notifier._notify_kodi(message, title=title, host=host, username=username, password=password, force=force, dest_app="PLEX") # pylint: disable=protected-access + return kodi_notifier._notify_kodi(message, title=title, host=host, username=username, password=password, force=force, dest_app='PLEX') # pylint: disable=protected-access ############################################################################## # Public functions @@ -92,7 +80,7 @@ def notify_git_update(self, new_version='??'): if update_text and title and new_version: self._notify_pht(update_text + new_version, title) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.NOTIFY_ON_LOGIN: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] @@ -125,11 +113,11 @@ def update_library(self, ep_obj=None, host=None, # pylint: disable=too-many-arg host = host or app.PLEX_SERVER_HOST if not host: - logger.log(u'PLEX: No Plex Media Server host specified, check your settings', logger.DEBUG) + log.debug(u'PLEX: No Plex Media Server host specified, check your settings') return False if not self.get_token(username, password, plex_server_token): - logger.log(u'PLEX: Error getting auth token for Plex Media Server, check your settings', logger.WARNING) + log.warning(u'PLEX: Error getting auth token for Plex Media Server, check your settings') return False file_location = '' if not ep_obj else ep_obj.location @@ -143,30 +131,26 @@ def update_library(self, ep_obj=None, host=None, # pylint: disable=too-many-arg try: xml_response = get_url(url, headers=self.headers, session=self.session, returns='text') if not xml_response: - logger.log(u'PLEX: Error while trying to contact Plex Media Server: {0}'.format - (cur_host), logger.WARNING) + log.warning(u'PLEX: Error while trying to contact Plex Media Server: {0}', cur_host) hosts_failed.add(cur_host) continue media_container = etree.fromstring(xml_response) except IOError as error: - logger.log(u'PLEX: Error while trying to contact Plex Media Server: {0}'.format - (ex(error)), logger.WARNING) + log.warning(u'PLEX: Error while trying to contact Plex Media Server: {0}', ex(error)) hosts_failed.add(cur_host) continue except Exception as error: if 'invalid token' in str(error): - logger.log(u'PLEX: Please set TOKEN in Plex settings: ', logger.WARNING) + log.warning(u'PLEX: Please set TOKEN in Plex settings: ') else: - logger.log(u'PLEX: Error while trying to contact Plex Media Server: {0}'.format - (ex(error)), logger.WARNING) + log.warning(u'PLEX: Error while trying to contact Plex Media Server: {0}', ex(error)) hosts_failed.add(cur_host) continue sections = media_container.findall('.//Directory') if not sections: - 
logger.log(u'PLEX: Plex Media Server not running on: {0}'.format - (cur_host), logger.DEBUG) + log.debug(u'PLEX: Plex Media Server not running on: {0}', cur_host) hosts_failed.add(cur_host) continue @@ -191,9 +175,9 @@ def update_library(self, ep_obj=None, host=None, # pylint: disable=too-many-arg return (', '.join(set(hosts_failed)), None)[not len(hosts_failed)] if hosts_match: - logger.log(u'PLEX: Updating hosts where TV section paths match the downloaded show: ' + ', '.join(set(hosts_match)), logger.DEBUG) + log.debug(u'PLEX: Updating hosts where TV section paths match the downloaded show: {0}', ', '.join(set(hosts_match))) else: - logger.log(u'PLEX: Updating all hosts with TV sections: ' + ', '.join(set(hosts_all)), logger.DEBUG) + log.debug(u'PLEX: Updating all hosts with TV sections: {0}', ', '.join(set(hosts_all))) hosts_try = (hosts_match.copy(), hosts_all.copy())[not len(hosts_match)] for section_key, cur_host in iteritems(hosts_try): @@ -202,8 +186,7 @@ def update_library(self, ep_obj=None, host=None, # pylint: disable=too-many-arg try: get_url(url, headers=self.headers, session=self.session, returns='text') except Exception as error: - logger.log(u'PLEX: Error updating library section for Plex Media Server: {0}'.format - (ex(error)), logger.WARNING) + log.warning(u'PLEX: Error updating library section for Plex Media Server: {0}', ex(error)) hosts_failed.add(cur_host) return (', '.join(set(hosts_failed)), None)[not len(hosts_failed)] @@ -222,7 +205,7 @@ def get_token(self, username=None, password=None, plex_server_token=None): if not (username and password): return True - logger.log(u'PLEX: fetching plex.tv credentials for user: ' + username, logger.DEBUG) + log.debug(u'PLEX: fetching plex.tv credentials for user: {0}', username) params = { 'user[login]': username, @@ -240,7 +223,6 @@ def get_token(self, username=None, password=None, plex_server_token=None): except Exception as error: self.headers.pop('X-Plex-Token', '') - logger.log(u'PLEX: Error fetching credentials from from plex.tv for user {0}: {1}'.format - (username, error), logger.DEBUG) + log.debug(u'PLEX: Error fetching credentials from from plex.tv for user {0}: {1}', username, error) return 'X-Plex-Token' in self.headers diff --git a/medusa/notifiers/prowl.py b/medusa/notifiers/prowl.py index 4ee68dc446..3516207762 100644 --- a/medusa/notifiers/prowl.py +++ b/medusa/notifiers/prowl.py @@ -1,32 +1,16 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . -# -############################################################################## - import ast +import logging import socket import time +from medusa import app, common, db +from medusa.helper.encoding import ss +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urlencode from six.moves.http_client import HTTPException, HTTPSConnection -from .. 
import app, common, db, logger -from ..helper.encoding import ss try: # this only exists in 2.6 @@ -36,10 +20,13 @@ class SSLError(Exception): pass +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class Notifier(object): def test_notify(self, prowl_api, prowl_priority): - return self._send_prowl(prowl_api, prowl_priority, event="Test", message="Testing Prowl settings from Medusa", force=True) + return self._send_prowl(prowl_api, prowl_priority, event='Test', message='Testing Prowl settings from Medusa', force=True) def notify_snatch(self, ep_name, is_proper): ep_name = ss(ep_name) @@ -47,11 +34,11 @@ def notify_snatch(self, ep_name, is_proper): show = self._parse_episode(ep_name) recipients = self._generate_recipients(show) if not recipients: - logger.log('Skipping prowl notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping prowl notify because there are no configured recipients') else: for api in recipients: self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]], - message=ep_name + " :: " + time.strftime(app.DATE_PRESET + " " + app.TIME_PRESET)) + message=ep_name + ' :: ' + time.strftime(app.DATE_PRESET + ' ' + app.TIME_PRESET)) def notify_download(self, ep_name): ep_name = ss(ep_name) @@ -59,11 +46,11 @@ def notify_download(self, ep_name): show = self._parse_episode(ep_name) recipients = self._generate_recipients(show) if not recipients: - logger.log('Skipping prowl notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping prowl notify because there are no configured recipients') else: for api in recipients: self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], - message=ep_name + " :: " + time.strftime(app.DATE_PRESET + " " + app.TIME_PRESET)) + message=ep_name + ' :: ' + time.strftime(app.DATE_PRESET + ' ' + app.TIME_PRESET)) def notify_subtitle_download(self, ep_name, lang): ep_name = ss(ep_name) @@ -71,20 +58,20 @@ def notify_subtitle_download(self, ep_name, lang): show = self._parse_episode(ep_name) recipients = self._generate_recipients(show) if not recipients: - logger.log('Skipping prowl notify because there are no configured recipients', logger.DEBUG) + log.debug('Skipping prowl notify because there are no configured recipients') else: for api in recipients: self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], - message=ep_name + " [" + lang + "] :: " + time.strftime(app.DATE_PRESET + " " + app.TIME_PRESET)) + message=ep_name + ' [' + lang + '] :: ' + time.strftime(app.DATE_PRESET + ' ' + app.TIME_PRESET)) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_PROWL: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._send_prowl(prowl_api=None, prowl_priority=None, event=title, message=update_text + new_version) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_PROWL: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] @@ -105,7 +92,7 @@ def _generate_recipients(show=None): # Grab the per-show-notification recipients if show is not None: for value in show: - for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", 
(value,)): + for subs in mydb.select('SELECT notify_list FROM tv_shows WHERE show_name = ?', (value,)): if subs['notify_list']: if subs['notify_list'][0] == '{': # legacy format handling entries = dict(ast.literal_eval(subs['notify_list'])) @@ -132,10 +119,10 @@ def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, f title = app.PROWL_MESSAGE_TITLE - logger.log(u"PROWL: Sending notice with details: title=\"%s\" event=\"%s\", message=\"%s\", priority=%s, api=%s" - % (title, event, message, prowl_priority, prowl_api), logger.DEBUG) + log.debug(u'PROWL: Sending notice with details: title="{0}" event="{1}", message="{2}", priority={3}, api={4}', + title, event, message, prowl_priority, prowl_api) - http_handler = HTTPSConnection("api.prowlapp.com") + http_handler = HTTPSConnection('api.prowlapp.com') data = {'apikey': prowl_api, 'application': title, @@ -144,32 +131,32 @@ def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, f 'priority': prowl_priority} try: - http_handler.request("POST", - "/publicapi/add", - headers={'Content-type': "application/x-www-form-urlencoded"}, + http_handler.request('POST', + '/publicapi/add', + headers={'Content-type': 'application/x-www-form-urlencoded'}, body=urlencode(data)) except (SSLError, HTTPException, socket.error): - logger.log(u"Prowl notification failed.", logger.ERROR) + log.error(u'Prowl notification failed.') return False response = http_handler.getresponse() request_status = response.status if request_status == 200: - logger.log(u"Prowl notifications sent.", logger.INFO) + log.info(u'Prowl notifications sent.') return True elif request_status == 401: - logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR) + log.error(u'Prowl auth failed: {0}', response.reason) return False else: - logger.log(u"Prowl notification failed.", logger.ERROR) + log.error(u'Prowl notification failed.') return False @staticmethod def _parse_episode(ep_name): ep_name = ss(ep_name) - sep = " - " + sep = ' - ' titles = ep_name.split(sep) titles.sort(key=len, reverse=True) - logger.log("TITLES: %s" % titles, logger.DEBUG) + log.debug('TITLES: {0}', titles) return titles diff --git a/medusa/notifiers/pushalot.py b/medusa/notifiers/pushalot.py index 58bc884d76..2cce437d08 100644 --- a/medusa/notifiers/pushalot.py +++ b/medusa/notifiers/pushalot.py @@ -1,26 +1,14 @@ # coding=utf-8 -# Author: Maciej Olesinski (https://github.com/molesinski/) -# Based on prowl.py by Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - from __future__ import unicode_literals -from .. 
import app, common, helpers, logger +import logging + +from medusa import app, common, helpers +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -84,9 +72,9 @@ def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=No pushalot_authorizationtoken = pushalot_authorizationtoken or app.PUSHALOT_AUTHORIZATIONTOKEN - logger.log('Pushalot event: {}'.format(event), logger.DEBUG) - logger.log('Pushalot message: {}'.format(message), logger.DEBUG) - logger.log('Pushalot api: {}'.format(pushalot_authorizationtoken), logger.DEBUG) + log.debug('Pushalot event: {0}', event) + log.debug('Pushalot message: {0}', message) + log.debug('Pushalot api: {0}', pushalot_authorizationtoken) post_data = { 'AuthorizationToken': pushalot_authorizationtoken, @@ -104,11 +92,12 @@ def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=No success = jdata.pop('Success', False) if success: - logger.log('Pushalot notifications sent.', logger.DEBUG) + log.debug('Pushalot notifications sent.') else: - logger.log('Pushalot notification failed: {} {}'.format( + log.error( + 'Pushalot notification failed: {0} {1}', jdata.get('Status', ''), jdata.get('Description', 'Unknown') - ), logger.ERROR) + ) return success diff --git a/medusa/notifiers/pushbullet.py b/medusa/notifiers/pushbullet.py index adff9a0735..d9d5f86723 100644 --- a/medusa/notifiers/pushbullet.py +++ b/medusa/notifiers/pushbullet.py @@ -1,27 +1,17 @@ -# -*- coding: utf-8 -* -# Author: Pedro Correia (http://github.com/pedrocorreia/) -# Based on pushalot.py by Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +# coding=utf-8 from __future__ import unicode_literals +import logging import re + +from medusa import app, common, helpers +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urljoin -from .. 
import app, common, helpers, logger + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -31,7 +21,7 @@ def __init__(self): self.url = 'https://api.pushbullet.com/v2/' def test_notify(self, pushbullet_api): - logger.log('Sending a test Pushbullet notification.', logger.DEBUG) + log.debug('Sending a test Pushbullet notification.') return self._sendPushbullet( pushbullet_api, event='Test', @@ -40,7 +30,7 @@ def test_notify(self, pushbullet_api): ) def get_devices(self, pushbullet_api): - logger.log('Testing Pushbullet authentication and retrieving the device list.', logger.DEBUG) + log.debug('Testing Pushbullet authentication and retrieving the device list.') headers = {'Access-Token': pushbullet_api, 'Content-Type': 'application/json'} try: @@ -102,10 +92,10 @@ def _sendPushbullet( # pylint: disable=too-many-arguments pushbullet_api = pushbullet_api or app.PUSHBULLET_API pushbullet_device = pushbullet_device or app.PUSHBULLET_DEVICE - logger.log('Pushbullet event: %r' % event, logger.DEBUG) - logger.log('Pushbullet message: %r' % message, logger.DEBUG) - logger.log('Pushbullet api: %r' % pushbullet_api, logger.DEBUG) - logger.log('Pushbullet devices: %r' % pushbullet_device, logger.DEBUG) + log.debug('Pushbullet event: {0!r}', event) + log.debug('Pushbullet message: {0!r}', message) + log.debug('Pushbullet api: {0!r}', pushbullet_api) + log.debug('Pushbullet devices: {0!r}', pushbullet_device) post_data = { 'title': event, @@ -124,16 +114,16 @@ def _sendPushbullet( # pylint: disable=too-many-arguments try: response = r.json() except ValueError: - logger.log('Pushbullet notification failed. Could not parse pushbullet response.', logger.WARNING) + log.warning('Pushbullet notification failed. Could not parse pushbullet response.') push_result['error'] = 'Pushbullet notification failed. Could not parse pushbullet response.' return push_result failed = response.pop('error', {}) if failed: - logger.log('Pushbullet notification failed: {0}'.format(failed.get('message')), logger.WARNING) + log.warning('Pushbullet notification failed: {0}', failed.get('message')) push_result['error'] = 'Pushbullet notification failed: {0}'.format(failed.get('message')) else: - logger.log('Pushbullet notification sent.', logger.DEBUG) + log.debug('Pushbullet notification sent.') push_result['success'] = True return push_result diff --git a/medusa/notifiers/pushover.py b/medusa/notifiers/pushover.py index d51158aef9..fb924e022b 100644 --- a/medusa/notifiers/pushover.py +++ b/medusa/notifiers/pushover.py @@ -1,34 +1,31 @@ # coding=utf-8 -# Author: Marvin Pinto -# Author: Dennis Lutter -# Author: Aaron Bieber -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . 
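# The Pushbullet hunk above builds a JSON payload, posts it to the v2 API and
# inspects the response body for an 'error' object. A condensed sketch of that
# round trip with plain `requests`; the patched code routes the call through
# medusa.helpers and its shared session, and its exact payload fields may
# differ slightly.
import requests


def send_pushbullet_note(token, title, body, device_iden=None):
    """Send a simple 'note' push and return (success, error_message)."""
    payload = {'type': 'note', 'title': title, 'body': body}
    if device_iden:
        # Target a specific device; omit to push to all of them.
        payload['device_iden'] = device_iden
    response = requests.post(
        'https://api.pushbullet.com/v2/pushes',
        json=payload,
        headers={'Access-Token': token, 'Content-Type': 'application/json'},
    )
    try:
        data = response.json()
    except ValueError:
        return False, 'Could not parse pushbullet response.'
    error = data.get('error')
    if error:
        return False, error.get('message', 'Unknown error')
    return True, None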
+import logging import time +from medusa import app +from medusa.common import ( + NOTIFY_DOWNLOAD, + NOTIFY_GIT_UPDATE, + NOTIFY_GIT_UPDATE_TEXT, + NOTIFY_LOGIN, + NOTIFY_LOGIN_TEXT, + NOTIFY_SNATCH, + NOTIFY_SNATCH_PROPER, + NOTIFY_SUBTITLE_DOWNLOAD, + notifyStrings, +) +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urlencode from six.moves.http_client import HTTPSConnection from six.moves.urllib.error import HTTPError -from .. import app, logger -from ..common import NOTIFY_DOWNLOAD, NOTIFY_GIT_UPDATE, NOTIFY_GIT_UPDATE_TEXT, NOTIFY_LOGIN, NOTIFY_LOGIN_TEXT, NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER, \ - NOTIFY_SUBTITLE_DOWNLOAD, notifyStrings -from ..helper.exceptions import ex -API_URL = "https://api.pushover.net/1/messages.json" +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + +API_URL = 'https://api.pushover.net/1/messages.json' class Notifier(object): @@ -36,7 +33,7 @@ def __init__(self): pass def test_notify(self, userKey=None, apiKey=None): - return self._notifyPushover("This is a test notification from Medusa", 'Test', userKey=userKey, apiKey=apiKey, force=True) + return self._notifyPushover('This is a test notification from Medusa', 'Test', userKey=userKey, apiKey=apiKey, force=True) def _sendPushover(self, msg, title, sound=None, userKey=None, apiKey=None): """ @@ -59,54 +56,54 @@ def _sendPushover(self, msg, title, sound=None, userKey=None, apiKey=None): if sound is None: sound = app.PUSHOVER_SOUND - logger.log(u"Pushover API KEY in use: " + apiKey, logger.DEBUG) + log.debug(u'Pushover API KEY in use: {0}', apiKey) # build up the URL and parameters msg = msg.strip() # send the request to pushover try: - if app.PUSHOVER_SOUND != "default": + if app.PUSHOVER_SOUND != 'default': args = { - "token": apiKey, - "user": userKey, - "title": title.encode('utf-8'), - "message": msg.encode('utf-8'), - "timestamp": int(time.time()), - "retry": 60, - "expire": 3600, - "sound": sound, + 'token': apiKey, + 'user': userKey, + 'title': title.encode('utf-8'), + 'message': msg.encode('utf-8'), + 'timestamp': int(time.time()), + 'retry': 60, + 'expire': 3600, + 'sound': sound, } else: # sound is default, so don't send it args = { - "token": apiKey, - "user": userKey, - "title": title.encode('utf-8'), - "message": msg.encode('utf-8'), - "timestamp": int(time.time()), - "retry": 60, - "expire": 3600, + 'token': apiKey, + 'user': userKey, + 'title': title.encode('utf-8'), + 'message': msg.encode('utf-8'), + 'timestamp': int(time.time()), + 'retry': 60, + 'expire': 3600, } if app.PUSHOVER_DEVICE: - args["device"] = app.PUSHOVER_DEVICE + args['device'] = app.PUSHOVER_DEVICE - conn = HTTPSConnection("api.pushover.net:443") - conn.request("POST", "/1/messages.json", - urlencode(args), {"Content-type": "application/x-www-form-urlencoded"}) + conn = HTTPSConnection('api.pushover.net:443') + conn.request('POST', '/1/messages.json', + urlencode(args), {'Content-type': 'application/x-www-form-urlencoded'}) except HTTPError as e: # if we get an error back that doesn't have an error code then who knows what's really happening if not hasattr(e, 'code'): - logger.log(u"Pushover notification failed." + ex(e), logger.ERROR) + log.error(u'Pushover notification failed. {}', ex(e)) return False else: - logger.log(u"Pushover notification failed. Error code: " + str(e.code), logger.ERROR) + log.error(u'Pushover notification failed. 
Error code: {0}', e.code) # HTTP status 404 if the provided email address isn't a Pushover user. if e.code == 404: - logger.log(u"Username is wrong/not a pushover email. Pushover will send an email to it", logger.WARNING) + log.warning(u'Username is wrong/not a pushover email. Pushover will send an email to it') return False # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service. @@ -115,23 +112,23 @@ def _sendPushover(self, msg, title, sound=None, userKey=None, apiKey=None): # HTTP status 401 if the user doesn't have the service added subscribeNote = self._sendPushover(msg, title, sound=sound, userKey=userKey, apiKey=apiKey) if subscribeNote: - logger.log(u"Subscription sent", logger.DEBUG) + log.debug(u'Subscription sent') return True else: - logger.log(u"Subscription could not be sent", logger.ERROR) + log.error(u'Subscription could not be sent') return False # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters elif e.code == 400: - logger.log(u"Wrong data sent to pushover", logger.ERROR) + log.error(u'Wrong data sent to pushover') return False # If you receive a HTTP status code of 429, it is because the message limit has been reached (free limit is 7,500) elif e.code == 429: - logger.log(u"Pushover API message limit reached - try a different API key", logger.ERROR) + log.error(u'Pushover API message limit reached - try a different API key') return False - logger.log(u"Pushover notification successful.", logger.INFO) + log.info(u'Pushover notification successful.') return True def notify_snatch(self, ep_name, is_proper): @@ -145,15 +142,15 @@ def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): if app.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notifyPushover(title, ep_name + ": " + lang) + self._notifyPushover(title, ep_name + ': ' + lang) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_PUSHOVER: update_text = notifyStrings[NOTIFY_GIT_UPDATE_TEXT] title = notifyStrings[NOTIFY_GIT_UPDATE] self._notifyPushover(title, update_text + new_version) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_PUSHOVER: update_text = notifyStrings[NOTIFY_LOGIN_TEXT] title = notifyStrings[NOTIFY_LOGIN] @@ -172,9 +169,9 @@ def _notifyPushover(self, title, message, sound=None, userKey=None, apiKey=None, """ if not app.USE_PUSHOVER and not force: - logger.log(u"Notification for Pushover not enabled, skipping this notification", logger.DEBUG) + log.debug(u'Notification for Pushover not enabled, skipping this notification') return False - logger.log(u"Sending notification for " + message, logger.DEBUG) + log.debug(u'Sending notification for {0}', message) return self._sendPushover(message, title, sound=sound, userKey=userKey, apiKey=apiKey) diff --git a/medusa/notifiers/pytivo.py b/medusa/notifiers/pytivo.py index 399764eb40..c3c0a70280 100644 --- a/medusa/notifiers/pytivo.py +++ b/medusa/notifiers/pytivo.py @@ -1,29 +1,18 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - +import logging import os +from medusa import app +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urlencode from six.moves.urllib.error import HTTPError from six.moves.urllib.request import Request, urlopen -from .. import app, logger -from ..helper.exceptions import ex + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -39,7 +28,7 @@ def notify_subtitle_download(self, ep_name, lang): def notify_git_update(self, new_version): pass - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): pass def update_library(self, ep_obj): @@ -73,19 +62,19 @@ def update_library(self, ep_obj): # Some show names have colons in them which are illegal in a path location, so strip them out. # (Are there other characters?) - showName = showName.replace(":", "") + showName = showName.replace(':', '') - root = showPath.replace(showName, "") - showAndSeason = rootShowAndSeason.replace(root, "") + root = showPath.replace(showName, '') + showAndSeason = rootShowAndSeason.replace(root, '') - container = shareName + "/" + showAndSeason - filename = "/" + absPath.replace(root, "") + container = shareName + '/' + showAndSeason + filename = '/' + absPath.replace(root, '') # Finally create the url and make request - requestUrl = "http://" + host + "/TiVoConnect?" + urlencode( + requestUrl = 'http://' + host + '/TiVoConnect?' + urlencode( {'Command': 'Push', 'Container': container, 'File': filename, 'tsn': tsn}) - logger.log(u"pyTivo notification: Requesting " + requestUrl, logger.DEBUG) + log.debug(u'pyTivo notification: Requesting {0}', requestUrl) request = Request(requestUrl) @@ -93,14 +82,14 @@ def update_library(self, ep_obj): urlopen(request) except HTTPError as e: if hasattr(e, 'reason'): - logger.log(u"pyTivo notification: Error, failed to reach a server - " + e.reason, logger.ERROR) + log.error(u'pyTivo notification: Error, failed to reach a server - {0}', e.reason) return False elif hasattr(e, 'code'): - logger.log(u"pyTivo notification: Error, the server couldn't fulfill the request - " + e.code, logger.ERROR) + log.error(u'pyTivo notification: Error, the server could not fulfill the request - {0}', e.code) return False except Exception as e: - logger.log(u"PYTIVO: Unknown exception: " + ex(e), logger.ERROR) + log.error(u'PYTIVO: Unknown exception: {0}', ex(e)) return False else: - logger.log(u"pyTivo notification: Successfully requested transfer of file") + log.info(u'pyTivo notification: Successfully requested transfer of file') return True diff --git a/medusa/notifiers/synoindex.py b/medusa/notifiers/synoindex.py index 28397a7482..9c107ce898 100644 --- a/medusa/notifiers/synoindex.py +++ b/medusa/notifiers/synoindex.py @@ -1,27 +1,15 @@ # coding=utf-8 -# Author: Sebastien Erard -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - +import logging import os import subprocess -from .. import app, logger -from ..helper.exceptions import ex +from medusa import app +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -37,7 +25,7 @@ def notify_subtitle_download(self, ep_name, lang): def notify_git_update(self, new_version): pass - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): pass def moveFolder(self, old_path, new_path): @@ -50,15 +38,15 @@ def moveObject(self, old_path, new_path): if app.USE_SYNOINDEX: synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', os.path.abspath(new_path), os.path.abspath(old_path)] - logger.log(u"Executing command " + str(synoindex_cmd), logger.DEBUG) - logger.log(u"Absolute path to command: " + os.path.abspath(synoindex_cmd[0]), logger.DEBUG) + log.debug(u'Executing command {0}', synoindex_cmd) + log.debug(u'Absolute path to command: {0}', os.path.abspath(synoindex_cmd[0])) try: p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=app.PROG_DIR) out, _ = p.communicate() - logger.log(u"Script result: " + str(out), logger.DEBUG) + log.debug(u'Script result: {0}', out) except OSError as e: - logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR) + log.error(u'Unable to run synoindex: {0}', ex(e)) def deleteFolder(self, cur_path): self.makeObject('-D', cur_path) @@ -75,12 +63,12 @@ def addFile(self, cur_file): def makeObject(self, cmd_arg, cur_path): if app.USE_SYNOINDEX: synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, os.path.abspath(cur_path)] - logger.log(u"Executing command " + str(synoindex_cmd), logger.DEBUG) - logger.log(u"Absolute path to command: " + os.path.abspath(synoindex_cmd[0]), logger.DEBUG) + log.debug(u'Executing command {0}', synoindex_cmd) + log.debug(u'Absolute path to command: {0}', os.path.abspath(synoindex_cmd[0])) try: p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=app.PROG_DIR) out, _ = p.communicate() - logger.log(u"Script result: " + str(out), logger.DEBUG) + log.debug(u'Script result: {0}', out) except OSError as e: - logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR) + log.error(u'Unable to run synoindex: {0}', ex(e)) diff --git a/medusa/notifiers/synology_notifier.py b/medusa/notifiers/synology_notifier.py index 3925132a68..a5d5979258 100644 --- a/medusa/notifiers/synology_notifier.py +++ b/medusa/notifiers/synology_notifier.py @@ -1,27 +1,15 @@ # coding=utf-8 -# Author: Nyaran -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - +import logging import os import subprocess -from .. import app, common, logger -from ..helper.exceptions import ex +from medusa import app, common +from medusa.helper.exceptions import ex +from medusa.logger.adapters.style import BraceAdapter + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -35,28 +23,28 @@ def notify_download(self, ep_name): def notify_subtitle_download(self, ep_name, lang): if app.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD: - self._send_synologyNotifier(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) + self._send_synologyNotifier(ep_name + ': ' + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_SYNOLOGYNOTIFIER: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._send_synologyNotifier(update_text + new_version, title) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_SYNOLOGYNOTIFIER: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] self._send_synologyNotifier(update_text.format(ipaddress), title) def _send_synologyNotifier(self, message, title): - synodsmnotify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message] - logger.log(u"Executing command " + str(synodsmnotify_cmd)) - logger.log(u"Absolute path to command: " + os.path.abspath(synodsmnotify_cmd[0]), logger.DEBUG) + synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, message] + log.info(u'Executing command {0}', synodsmnotify_cmd) + log.debug(u'Absolute path to command: {0}', os.path.abspath(synodsmnotify_cmd[0])) try: p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=app.PROG_DIR) out, _ = p.communicate() - logger.log(u"Script result: " + str(out), logger.DEBUG) + log.debug(u'Script result: {0}', out) except OSError as e: - logger.log(u"Unable to run synodsmnotify: " + ex(e)) + log.info(u'Unable to run synodsmnotify: {0}', ex(e)) diff --git a/medusa/notifiers/telegram.py b/medusa/notifiers/telegram.py index 9575bb29a0..62d13391fd 100644 --- a/medusa/notifiers/telegram.py +++ b/medusa/notifiers/telegram.py @@ -1,32 +1,29 @@ # coding=utf-8 -# Author: Marvin Pinto -# Author: Dennis Lutter -# Author: Aaron Bieber - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . 
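# The Synology hunks above shell out to DSM's synoindex/synodsmnotify binaries
# and log the captured output. A generic sketch of that pattern, again assuming
# the module-level BraceAdapter logger; the real code also pins cwd to
# app.PROG_DIR, which is omitted here.
import logging
import os
import subprocess

from medusa.logger.adapters.style import BraceAdapter

log = BraceAdapter(logging.getLogger(__name__))


def run_notifier_command(cmd):
    """Run an external notifier command, logging the command and its output."""
    log.debug(u'Executing command {0}', cmd)
    log.debug(u'Absolute path to command: {0}', os.path.abspath(cmd[0]))
    try:
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _ = proc.communicate()
    except OSError as error:
        log.error(u'Unable to run {0}: {1}', os.path.basename(cmd[0]), error)
        return None
    log.debug(u'Script result: {0}', out)
    return out


# For example (mirrors the synodsmnotify call in the hunk above):
# run_notifier_command(['/usr/syno/bin/synodsmnotify', '@administrators',
#                       'Medusa', 'Episode downloaded'])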
from __future__ import unicode_literals +import logging + +from medusa import app +from medusa.common import ( + NOTIFY_DOWNLOAD, + NOTIFY_GIT_UPDATE, + NOTIFY_GIT_UPDATE_TEXT, + NOTIFY_LOGIN, + NOTIFY_LOGIN_TEXT, + NOTIFY_SNATCH, + NOTIFY_SNATCH_PROPER, + NOTIFY_SUBTITLE_DOWNLOAD, + notifyStrings, +) +from medusa.helper.common import http_status_code +from medusa.logger.adapters.style import BraceAdapter + from requests.compat import urlencode from six.moves.urllib.request import Request, urlopen -from .. import app, logger -from ..common import NOTIFY_DOWNLOAD, NOTIFY_GIT_UPDATE, NOTIFY_GIT_UPDATE_TEXT, NOTIFY_LOGIN, NOTIFY_LOGIN_TEXT, NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER, \ - NOTIFY_SUBTITLE_DOWNLOAD, notifyStrings -from ..helper.common import http_status_code + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): @@ -58,7 +55,7 @@ def _send_telegram_msg(self, title, msg, user_id=None, api_key=None): user_id = app.TELEGRAM_ID if user_id is None else user_id api_key = app.TELEGRAM_APIKEY if api_key is None else api_key - logger.log('Telegram in use with API KEY: %s' % api_key, logger.DEBUG) + log.debug('Telegram in use with API KEY: {0}', api_key) message = '%s : %s' % (title.encode(), msg.encode()) payload = urlencode({'chat_id': user_id, 'text': message}) @@ -87,7 +84,7 @@ def _send_telegram_msg(self, title, msg, user_id=None, api_key=None): except Exception as e: message = 'Error while sending Telegram message: %s ' % e finally: - logger.log(message, logger.INFO) + log.info(message) return success, message def notify_snatch(self, ep_name, is_proper): @@ -158,9 +155,9 @@ def _notify_telegram(self, title, message, user_id=None, api_key=None, force=Fal """ if not (force or app.USE_TELEGRAM): - logger.log('Notification for Telegram not enabled, skipping this notification', logger.DEBUG) + log.debug('Notification for Telegram not enabled, skipping this notification') return False, 'Disabled' - logger.log('Sending a Telegram message for %s' % message, logger.DEBUG) + log.debug('Sending a Telegram message for {0}', message) return self._send_telegram_msg(title, message, user_id, api_key) diff --git a/medusa/notifiers/trakt.py b/medusa/notifiers/trakt.py index 7197a893fd..f46a0d521a 100644 --- a/medusa/notifiers/trakt.py +++ b/medusa/notifiers/trakt.py @@ -1,58 +1,54 @@ +"""Trakt notifier module.""" # coding=utf-8 -# Author: Dieter Blomme -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +from __future__ import unicode_literals +import logging -from __future__ import unicode_literals +from medusa import app +from medusa.helpers import get_title_without_year +from medusa.indexers.indexer_config import get_trakt_indexer +from medusa.logger.adapters.style import BraceAdapter -from traktor import AuthException, ServerBusy, TraktApi, TraktException -from .. 
import app, logger -from ..helper.exceptions import ex -from ..indexers.indexer_api import indexerApi +from traktor import AuthException, TokenExpiredException, TraktApi, TraktException + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): """A "notifier" for trakt.tv which keeps track of what has and hasn't been added to your library.""" def notify_snatch(self, ep_name, is_proper): + """Trakt don't support this method.""" pass def notify_download(self, ep_name): + """Trakt don't support this method.""" pass def notify_subtitle_download(self, ep_name, lang): + """Trakt don't support this method.""" pass def notify_git_update(self, new_version): + """Trakt don't support this method.""" pass def notify_login(self, ipaddress=''): + """Trakt don't support this method.""" pass @staticmethod def update_library(ep_obj): - """ - Sends a request to trakt indicating that the given episode is part of our library. + """Send a request to trakt indicating that the given episode is part of our library. ep_obj: The Episode object to add to trakt """ + # Check if TRAKT supports that indexer + if not get_trakt_indexer(ep_obj.show.indexer): + return - trakt_id = indexerApi(ep_obj.show.indexer).config['trakt_id'] # Create a trakt settings dict trakt_settings = {'trakt_api_secret': app.TRAKT_API_SECRET, 'trakt_api_key': app.TRAKT_API_KEY, @@ -64,20 +60,18 @@ def update_library(ep_obj): if app.USE_TRAKT: try: # URL parameters + title = get_title_without_year(ep_obj.show.name, ep_obj.show.start_year) data = { 'shows': [ { - 'title': ep_obj.show.name, + 'title': title, 'year': ep_obj.show.start_year, 'ids': {}, } ] } - if trakt_id == 'tvdb_id': - data['shows'][0]['ids']['tvdb'] = ep_obj.show.indexerid - else: - data['shows'][0]['ids']['tvrage'] = ep_obj.show.indexerid + data['shows'][0]['ids'][get_trakt_indexer(ep_obj.show.indexer)] = ep_obj.show.indexerid if app.TRAKT_SYNC_WATCHLIST: if app.TRAKT_REMOVE_SERIESLIST: @@ -96,14 +90,12 @@ def update_library(ep_obj): # update library trakt_api.request('sync/collection', data, method='POST') - except (TraktException, AuthException, ServerBusy) as trakt_ex: - logger.log('Could not connect to Trakt service: {0}'.format(ex(trakt_ex)), logger.WARNING) + except (TokenExpiredException, TraktException, AuthException) as error: + log.debug('Unable to update Trakt: {0}', error.message) @staticmethod def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode=None, update='add'): - - """ - Sends a request to trakt indicating that the given episode is part of our library. + """Send a request to trakt indicating that the given episode is part of our library. 
show_obj: The Series object to add to trakt s: season number @@ -112,6 +104,9 @@ def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode data_episode: structured object of episodes trakt type update: type o action add or remove """ + # Check if TRAKT supports that indexer + if not get_trakt_indexer(show_obj.indexer): + return trakt_settings = {'trakt_api_secret': app.TRAKT_API_SECRET, 'trakt_api_key': app.TRAKT_API_KEY, @@ -126,27 +121,24 @@ def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode try: # URL parameters if show_obj is not None: - trakt_id = indexerApi(show_obj.indexer).config['trakt_id'] + title = get_title_without_year(show_obj.name, show_obj.start_year) data = { 'shows': [ { - 'title': show_obj.name, + 'title': title, 'year': show_obj.start_year, 'ids': {}, } ] } - - if trakt_id == 'tvdb_id': - data['shows'][0]['ids']['tvdb'] = show_obj.indexerid - else: - data['shows'][0]['ids']['tvrage'] = show_obj.indexerid + data['shows'][0]['ids'][get_trakt_indexer(show_obj.indexer)] = show_obj.indexerid elif data_show is not None: data.update(data_show) else: - logger.log("There's a coding problem contact developer. " - "It's needed to be provided at least one of the two: data_show or show_obj", - logger.WARNING) + log.warning( + "There's a coding problem contact developer. It's needed to be provided at" + " least one of the two: data_show or show_obj", + ) return False if data_episode is not None: @@ -182,61 +174,54 @@ def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode trakt_api.request(trakt_url, data, method='POST') - except (TraktException, AuthException, ServerBusy) as trakt_ex: - logger.log('Could not connect to Trakt service: {0}'.format(ex(trakt_ex)), logger.WARNING) + except (TokenExpiredException, TraktException, AuthException) as error: + log.debug('Unable to update Trakt watchlist: {0}', error.message) return False return True @staticmethod def trakt_show_data_generate(data): - - showList = [] + """Build the JSON structure to send back to Trakt.""" + show_list = [] for indexer, indexerid, title, year in data: - trakt_id = indexerApi(indexer).config['trakt_id'] show = {'title': title, 'year': year, 'ids': {}} - if trakt_id == 'tvdb_id': - show['ids']['tvdb'] = indexerid - else: - show['ids']['tvrage'] = indexerid - showList.append(show) + show['ids'][get_trakt_indexer(indexer)] = indexerid + show_list.append(show) - post_data = {'shows': showList} + post_data = {'shows': show_list} return post_data @staticmethod def trakt_episode_data_generate(data): - + """Build the JSON structure to send back to Trakt.""" # Find how many unique season we have - uniqueSeasons = [] + unique_seasons = [] for season, episode in data: - if season not in uniqueSeasons: - uniqueSeasons.append(season) + if season not in unique_seasons: + unique_seasons.append(season) # build the query - seasonsList = [] - for searchedSeason in uniqueSeasons: - episodesList = [] + seasons_list = [] + for searchedSeason in unique_seasons: + episodes_list = [] for season, episode in data: if season == searchedSeason: - episodesList.append({'number': episode}) - seasonsList.append({'number': searchedSeason, 'episodes': episodesList}) + episodes_list.append({'number': episode}) + seasons_list.append({'number': searchedSeason, 'episodes': episodes_list}) - post_data = {'seasons': seasonsList} + post_data = {'seasons': seasons_list} return post_data @staticmethod def test_notify(username, blacklist_name=None): - """ - Sends a test 
notification to trakt with the given authentication info and returns a boolean - representing success. + """Send a test notification to trakt with the given authentication info and returns a boolean. api: The api string to use username: The username to use blacklist_name: slug of trakt list used to hide not interested show - Returns: True if the request succeeded, False otherwise """ try: @@ -257,6 +242,6 @@ def test_notify(username, blacklist_name=None): return "Trakt blacklist doesn't exists" else: return 'Test notice sent successfully to Trakt' - except (TraktException, AuthException, ServerBusy) as trakt_ex: - logger.log('Could not connect to Trakt service: {0}'.format(ex(trakt_ex)), logger.WARNING) - return 'Test notice failed to Trakt: {0}'.format(ex(trakt_ex)) + except (TokenExpiredException, TraktException, AuthException) as error: + log.warning('Unable to test TRAKT: {0}', error.message) + return 'Test notice failed to Trakt: {0}'.format(error.message) diff --git a/medusa/notifiers/tweet.py b/medusa/notifiers/tweet.py index d65a123170..404b589e91 100644 --- a/medusa/notifiers/tweet.py +++ b/medusa/notifiers/tweet.py @@ -1,31 +1,21 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +import logging + +from medusa import app, common +from medusa.logger.adapters.style import BraceAdapter import oauth2 as oauth import pythontwitter as twitter from six.moves.urllib.parse import parse_qsl -from .. 
import app, common, logger + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class Notifier(object): - consumer_key = "vHHtcB6WzpWDG6KYlBMr8g" - consumer_secret = "zMqq5CB3f8cWKiRO2KzWPTlBanYmV0VYxSXZ0Pxds0E" + consumer_key = 'vHHtcB6WzpWDG6KYlBMr8g' + consumer_secret = 'zMqq5CB3f8cWKiRO2KzWPTlBanYmV0VYxSXZ0Pxds0E' REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token' ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token' @@ -44,39 +34,39 @@ def notify_subtitle_download(self, ep_name, lang): if app.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD: self._notifyTwitter('{0} {1}: {2}'.format(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name, lang)) - def notify_git_update(self, new_version="??"): + def notify_git_update(self, new_version='??'): if app.USE_TWITTER: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._notifyTwitter('{0} - {1}{2}'.format(title, update_text, new_version)) - def notify_login(self, ipaddress=""): + def notify_login(self, ipaddress=''): if app.USE_TWITTER: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] self._notifyTwitter('{0} - {1}'.format(title, update_text.format(ipaddress))) def test_notify(self): - return self._notifyTwitter("This is a test notification from Medusa", force=True) + return self._notifyTwitter('This is a test notification from Medusa', force=True) def _get_authorization(self): oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret) oauth_client = oauth.Client(oauth_consumer) - logger.log(u'Requesting temp token from Twitter', logger.DEBUG) + log.debug(u'Requesting temp token from Twitter') resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET') if resp['status'] != '200': - logger.log(u'Invalid response from Twitter requesting temp token: {0}'.format(resp['status']), logger.ERROR) + log.error(u'Invalid response from Twitter requesting temp token: {0}', resp['status']) else: request_token = dict(parse_qsl(content)) app.TWITTER_USERNAME = request_token['oauth_token'] app.TWITTER_PASSWORD = request_token['oauth_token_secret'] - return self.AUTHORIZATION_URL + "?oauth_token=" + request_token['oauth_token'] + return self.AUTHORIZATION_URL + '?oauth_token=' + request_token['oauth_token'] def _get_credentials(self, key): request_token = { @@ -88,25 +78,25 @@ def _get_credentials(self, key): token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret']) token.set_verifier(key) - logger.log(u'Generating and signing request for an access token using key {0}'.format(key), logger.DEBUG) + log.debug(u'Generating and signing request for an access token using key {0}', key) oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret) - logger.log(u'oauth_consumer: {0}'.format(oauth_consumer), logger.DEBUG) + log.debug(u'oauth_consumer: {0}', oauth_consumer) oauth_client = oauth.Client(oauth_consumer, token) - logger.log(u'oauth_client: {0}'.format(oauth_client), logger.DEBUG) + log.debug(u'oauth_client: {0}', oauth_client) resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key) - logger.log(u'resp, content: {0}, {1}'.format(resp, content), logger.DEBUG) + log.debug(u'resp, content: {0}, {1}', resp, content) access_token = dict(parse_qsl(content)) - logger.log(u'access_token: {0}'.format(access_token), logger.DEBUG) + 
log.debug(u'access_token: {0}', access_token) - logger.log(u'resp[status] = {0}'.format(resp['status']), logger.DEBUG) + log.debug(u'resp[status] = {0}', resp['status']) if resp['status'] != '200': - logger.log(u'The request for a token with did not succeed: {0}'.format(resp['status']), logger.ERROR) + log.error(u'The request for a token with did not succeed: {0}', resp['status']) return False else: - logger.log(u'Your Twitter Access Token key: {0}'.format(access_token['oauth_token']), logger.DEBUG) - logger.log(u'Access Token secret: {0}'.format(access_token['oauth_token_secret']), logger.DEBUG) + log.debug(u'Your Twitter Access Token key: {0}', access_token['oauth_token']) + log.debug(u'Access Token secret: {0}', access_token['oauth_token_secret']) app.TWITTER_USERNAME = access_token['oauth_token'] app.TWITTER_PASSWORD = access_token['oauth_token_secret'] return True @@ -118,14 +108,14 @@ def _send_tweet(self, message=None): access_token_key = app.TWITTER_USERNAME access_token_secret = app.TWITTER_PASSWORD - logger.log(u'Sending tweet: {0}'.format(message), logger.DEBUG) + log.debug(u'Sending tweet: {0}', message) api = twitter.Api(username, password, access_token_key, access_token_secret) try: api.PostUpdate(message.encode('utf8')[:139]) except Exception as e: - logger.log(u'Error Sending Tweet: {!r}'.format(e), logger.ERROR) + log.error(u'Error Sending Tweet: {!r}', e) return False return True @@ -138,14 +128,14 @@ def _send_dm(self, message=None): access_token_key = app.TWITTER_USERNAME access_token_secret = app.TWITTER_PASSWORD - logger.log(u'Sending DM: {0} {1}'.format(dmdest, message), logger.DEBUG) + log.debug(u'Sending DM: {0} {1}', dmdest, message) api = twitter.Api(username, password, access_token_key, access_token_secret) try: api.PostDirectMessage(dmdest, message.encode('utf8')[:139]) - except Exception as e: - logger.log(u'Error Sending Tweet (DM): {!r}'.format(e), logger.ERROR) + except Exception as error: + log.error(u'Error Sending Tweet (DM): {!r}', error) return False return True @@ -157,6 +147,6 @@ def _notifyTwitter(self, message='', force=False): return False if app.TWITTER_USEDM and app.TWITTER_DMTO: - return self._send_dm(prefix + ": " + message) + return self._send_dm(prefix + ': ' + message) else: - return self._send_tweet(prefix + ": " + message) + return self._send_tweet(prefix + ': ' + message) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 6687dbe965..8b2de884ff 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . """Post processor module.""" + import fnmatch import os import re @@ -26,10 +27,14 @@ import adba +import rarfile + +from rarfile import Error as RarError, NeedFirstVolume + from six import text_type from . 
import app, common, db, failed_history, helpers, history, logger, notifiers, show_name_helpers -from .helper.common import episode_num, remove_extension +from .helper.common import episode_num, pretty_file_size, remove_extension from .helper.exceptions import (EpisodeNotFoundException, EpisodePostProcessingFailedException, ShowDirectoryNotFoundException) from .helpers import is_subtitle, verify_freespace @@ -89,6 +94,8 @@ def __init__(self, file_path, nzb_name=None, process_method=None, is_priority=No self.manually_searched = False + self.info_hash = None + self.item_resources = OrderedDict([('file name', self.file_name), ('relative path', self.rel_path), ('nzb name', self.nzb_name)]) @@ -120,147 +127,87 @@ def _get_rel_path(self): return self.file_path - def _check_for_existing_file(self, existing_file): + def _compare_file_size(self, existing_file): """ - Check if a file exists already. - - If it does whether it's bigger or smaller than the file we are post processing. + Compare size to existing file. - :param existing_file: The file to compare to + :param existing_file: file to compare :return: - DOESNT_EXIST if the file doesn't exist - EXISTS_LARGER if the file exists and is larger than the file we are post processing - EXISTS_SMALLER if the file exists and is smaller than the file we are post processing - EXISTS_SAME if the file exists and is the same size as the file we are post processing + DOESNT_EXIST if file doesn't exist + EXISTS_LARGER if existing file is larger + EXISTS_SMALLER if existing file is smaller + EXISTS_SAME if existing file is the same size """ - if not existing_file: - self._log(u"There is no existing file so there's no worries about replacing it", logger.DEBUG) - return PostProcessor.DOESNT_EXIST + new_size = os.path.getsize(self.file_path) - # if the new file exists, return the appropriate code depending on the size - if os.path.isfile(existing_file): + try: + old_size = os.path.getsize(existing_file) + except OSError: + self._log(u'New file: {}'.format(self.file_path)) + self._log(u'New size: {}'.format(pretty_file_size(new_size))) + self._log(u"There is no existing file so there's no worries about replacing it", logger.DEBUG) + return self.DOESNT_EXIST - # see if it's bigger than our old file - if os.path.getsize(existing_file) > os.path.getsize(self.file_path): - self._log(u'File {0} is larger than {1}'.format(existing_file, self.file_path), logger.DEBUG) - return PostProcessor.EXISTS_LARGER + delta_size = new_size - old_size - elif os.path.getsize(existing_file) == os.path.getsize(self.file_path): - self._log(u'File {0} is same size as {1}'.format(existing_file, self.file_path), logger.DEBUG) - return PostProcessor.EXISTS_SAME - - else: - self._log(u'File {0} is smaller than {1}'.format(existing_file, self.file_path), logger.DEBUG) - return PostProcessor.EXISTS_SMALLER + self._log(u'Old file: {}'.format(existing_file)) + self._log(u'New file: {}'.format(self.file_path)) + self._log(u'Old size: {}'.format(pretty_file_size(old_size))) + self._log(u'New size: {}'.format(pretty_file_size(new_size))) + if not delta_size: + self._log(u'New file is the same size.') + return self.EXISTS_SAME else: - self._log(u"File {0} doesn't exist so there's no worries about replacing it".format - (existing_file), logger.DEBUG) - return PostProcessor.DOESNT_EXIST - - @staticmethod - def _search_files(path, pattern='*', subfolders=None, base_name_only=None, sort=False): - """ - Search for files in a given path. 
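# Illustrative sketch (not part of the patch): the comparison performed by the new
# _compare_file_size() above, reduced to a standalone function. The constant names
# mirror the PostProcessor attributes; the function name itself is hypothetical.
import os

DOESNT_EXIST, EXISTS_LARGER, EXISTS_SMALLER, EXISTS_SAME = range(4)

def compare_file_size(new_file, existing_file):
    """Return how existing_file compares in size to new_file."""
    new_size = os.path.getsize(new_file)
    try:
        old_size = os.path.getsize(existing_file)
    except OSError:
        # The existing file is missing (or unreadable), so there is nothing to replace.
        return DOESNT_EXIST
    if new_size == old_size:
        return EXISTS_SAME
    # Note the inversion: EXISTS_LARGER means the *existing* file is the larger one.
    return EXISTS_LARGER if new_size < old_size else EXISTS_SMALLER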
- - :param path: path to file or folder (folder paths must end with slashes) - :type path: text_type - :param pattern: pattern used to match the files - :type pattern: text_type - :param subfolders: search for files in subfolders - :type subfolders: bool - :param base_name_only: only match files with the same name - :type base_name_only: bool - :param sort: return files sorted by size - :type sort: bool - :return: list with found files or empty list - :rtype: list - """ - directory = os.path.dirname(path) - - if base_name_only: - if os.path.isfile(path): - new_pattern = os.path.basename(path).rpartition('.')[0] - elif os.path.isdir(path): - new_pattern = os.path.split(directory)[1] - else: - return [] - - if any(char in new_pattern for char in ['[', '?', '*']): - # Escaping is done by wrapping any of "*?[" between square brackets. - # Modified from: https://hg.python.org/cpython/file/tip/Lib/glob.py#l161 - if isinstance(new_pattern, bytes): - new_pattern = re.compile(b'([*?[])').sub(br'[\1]', new_pattern) - else: - new_pattern = re.compile('([*?[])').sub(r'[\1]', new_pattern) + self._log(u'New file is {size} {difference}'.format( + size=pretty_file_size(abs(delta_size)), + difference=u'smaller' if new_size < old_size else u'larger', + )) + return self.EXISTS_LARGER if new_size < old_size else self.EXISTS_SMALLER - pattern = new_pattern + pattern - - found_files = [] - for root, __, filenames in os.walk(directory): - for filename in fnmatch.filter(filenames, pattern): - found_files.append(os.path.join(root, filename)) - if not subfolders: - break - - if sort: - found_files = sorted(found_files, key=os.path.getsize, reverse=True) - - return found_files - - def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False, subfolders=False): + def list_associated_files(self, filepath, base_name_only=False, subtitles_only=False, subfolders=False): """ For a given file path search for files in the same directory and return their absolute paths. - :param file_path: The file to check for associated files - :param base_name_only: False add extra '.' 
(conservative search) to file_path minus extension + :param filepath: The file to check for associated files + :param base_name_only: list only files with the same basename :param subtitles_only: list only subtitles :param subfolders: check subfolders while listing files :return: A list containing all files which are associated to the given file """ + files = self._search_files(filepath, subfolders=subfolders, base_name_only=base_name_only) + # file path to the video file that is being processed (without extension) - processed_file_name = os.path.basename(file_path).rpartition('.')[0].lower() + processed_file_name = os.path.splitext(os.path.basename(filepath))[0].lower() - file_list = self._search_files(file_path, subfolders=subfolders, base_name_only=base_name_only) + processed_names = (processed_file_name,) + processed_names += filter(None, (self._rar_basename(filepath, files),)) # loop through all the files in the folder, and check if they are the same name # even when the cases don't match filelist = [] - rar_file = [os.path.basename(f).rpartition('.')[0].lower() for f in file_list - if helpers.get_extension(f).lower() == 'rar'] - for found_file in file_list: + for found_file in files: file_name = os.path.basename(found_file).lower() - if file_name.startswith(processed_file_name): - - # only add subtitles with valid languages to the list - if is_subtitle(found_file): - code = file_name.rsplit('.', 2)[1].replace('_', '-') - language = from_code(code, unknown='') or from_ietf_code(code, unknown='und') - if not language: - continue - - filelist.append(found_file) - # List associated files based on .RAR files like Show.101.720p-GROUP.nfo and Show.101.720p-GROUP.rar - elif any([file_name.startswith(r) for r in rar_file]): + if file_name.startswith(processed_names): filelist.append(found_file) file_path_list = [] extensions_to_delete = [] for associated_file_path in filelist: # Exclude the video file we are post-processing - if associated_file_path == file_path: - continue - - # Exlude non-subtitle files with the 'only subtitles' option - if subtitles_only and not is_subtitle(associated_file_path): + if associated_file_path == filepath: continue # Exclude .rar files from associated list if re.search(r'(^.+\.(rar|r\d+)$)', associated_file_path): continue + # Exlude non-subtitle files with the 'only subtitles' option + if subtitles_only and not is_subtitle(associated_file_path): + continue + # Add the extensions that the user doesn't allow to the 'extensions_to_delete' list if app.MOVE_ASSOCIATED_FILES: allowed_extensions = app.ALLOWED_EXTENSIONS.split(',') @@ -276,17 +223,85 @@ def list_associated_files(self, file_path, base_name_only=False, subtitles_only= if file_path_list: self._log(u'Found the following associated files for {0}: {1}'.format - (file_path, file_path_list), logger.DEBUG) + (filepath, file_path_list), logger.DEBUG) if extensions_to_delete: # Rebuild the 'file_path_list' list only with the extensions the user allows file_path_list = [associated_file for associated_file in file_path_list if associated_file not in extensions_to_delete] self._delete(extensions_to_delete) else: - self._log(u'No associated files for {0} were found during this pass'.format(file_path), logger.DEBUG) + self._log(u'No associated files for {0} were found during this pass'.format(filepath), logger.DEBUG) return file_path_list + @staticmethod + def _search_files(path, pattern='*', subfolders=None, base_name_only=None, sort=None): + """ + Search for files in a given path. 
+ + :param path: path to file or folder (folder paths must end with slashes) + :type path: text_type + :param pattern: pattern used to match the files + :type pattern: text_type + :param subfolders: search for files in subfolders + :type subfolders: bool + :param base_name_only: only match files with the same name + :type base_name_only: bool + :param sort: return files sorted by size + :type sort: bool + :return: list with found files or empty list + :rtype: list + """ + directory = os.path.dirname(path) + + if base_name_only: + if os.path.isfile(path): + new_pattern = os.path.splitext(os.path.basename(path))[0] + elif os.path.isdir(path): + new_pattern = os.path.split(directory)[1] + else: + return [] + + if any(char in new_pattern for char in ['[', '?', '*']): + # Escaping is done by wrapping any of "*?[" between square brackets. + # Modified from: https://hg.python.org/cpython/file/tip/Lib/glob.py#l161 + if isinstance(new_pattern, bytes): + new_pattern = re.compile(b'([*?[])').sub(br'[\1]', new_pattern) + else: + new_pattern = re.compile('([*?[])').sub(r'[\1]', new_pattern) + + pattern = new_pattern + pattern + + files = [] + for root, __, filenames in os.walk(directory): + for filename in fnmatch.filter(filenames, pattern): + files.append(os.path.join(root, filename)) + if not subfolders: + break + + if sort: + files = sorted(files, key=os.path.getsize, reverse=True) + + return files + + @staticmethod + def _rar_basename(filepath, files): + """Return the basename of the source rar archive if found.""" + videofile = os.path.basename(filepath) + rars = (x for x in files if os.path.isfile(x) and rarfile.is_rarfile(x)) + + for rar in rars: + try: + content = rarfile.RarFile(rar).namelist() + except NeedFirstVolume: + continue + except RarError as e: + logger.log(u'An error occurred while reading the following RAR file: {name}. ' + u'Error: {message}'.format(name=rar, message=e), logger.WARNING) + continue + if videofile in content: + return os.path.splitext(os.path.basename(rar))[0] + def _delete(self, file_path, associated_files=False): """ Delete the file and optionally all associated files. @@ -332,6 +347,58 @@ def _delete(self, file_path, associated_files=False): # do the library update for synoindex notifiers.synoindex_notifier.deleteFile(cur_file) + @staticmethod + def rename_associated_file(new_path, new_base_name, filepath): + """Rename associated file using media basename. + + :param new_path: full show folder path where the file will be moved|copied|linked to + :param new_base_name: the media base filename (no extension) to use during the rename + :param filepath: full path of the associated file + :return: renamed full file path + """ + # remember if the extension changed + changed_extension = None + # file extension without leading dot + extension = helpers.get_extension(filepath) + # initally set current extension as new extension + new_extension = extension + + if is_subtitle(filepath): + code = filepath.rsplit('.', 2)[1].lower().replace('_', '-') + if from_code(code, unknown='') or from_ietf_code(code, unknown=''): + # TODO remove this hardcoded language + if code == 'pt-br': + code = 'pt-BR' + new_extension = code + '.' + extension + changed_extension = True + # replace nfo with nfo-orig to avoid conflicts + elif extension == 'nfo' and app.NFO_RENAME: + new_extension = 'nfo-orig' + changed_extension = True + + # rename file with new base name + if new_base_name: + new_file_name = new_base_name + '.' 
+ new_extension + else: + # current file name including extension + new_file_name = os.path.basename(filepath) + # if we're not renaming we still need to change the extension sometimes + if changed_extension: + new_file_name = new_file_name.replace(extension, new_extension) + + if app.SUBTITLES_DIR and is_subtitle(filepath): + subs_new_path = os.path.join(new_path, app.SUBTITLES_DIR) + dir_exists = helpers.make_dir(subs_new_path) + if not dir_exists: + logger.log(u'Unable to create subtitles folder {0}'.format(subs_new_path), logger.ERROR) + else: + helpers.chmod_as_parent(subs_new_path) + new_file_path = os.path.join(subs_new_path, new_file_name) + else: + new_file_path = os.path.join(new_path, new_file_name) + + return new_file_path + def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False, subtitle_action=None): """ @@ -339,11 +406,11 @@ def _combined_file_operation(self, file_path, new_path, new_base_name, associate Can rename the file as well as change its location, and optionally move associated files too. - :param file_path: The full path of the media file to act on - :param new_path: Destination path where we want to move/copy the file to - :param new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name. + :param file_path: The full path of the file to act on + :param new_path: full show folder path where the file will be moved|copied|linked to + :param new_base_name: The base filename (no extension) to use during the action. Use None to keep the same name :param associated_files: Boolean, whether we should copy similarly-named files too - :param action: function that takes an old path and new path and does an operation with them (move/copy) + :param action: function that takes an old path and new path and does an operation with them (move/copy/link) :param subtitles: Boolean, whether we should process subtitles too """ if not action: @@ -361,71 +428,24 @@ def _combined_file_operation(self, file_path, new_path, new_base_name, associate (file_path), logger.DEBUG) return - # base name with file path (without extension and ending dot) - old_base_name = file_path.rpartition('.')[0] - old_base_name_length = len(old_base_name) - - for cur_file_path in file_list: - # remember if the extension changed - changed_extension = None - # file extension without leading dot (for example: de.srt) - extension = cur_file_path[old_base_name_length + 1:] - # If basename is different, then is a RAR associated file. - if not extension: - helpers.get_extension(cur_file_path) - # initally set current extension as new extension - new_extension = extension - - # split the extension in two parts. E.g.: ('de', '.srt') - split_extension = os.path.splitext(extension) - # check if it's a subtitle and also has a subtitle language - if is_subtitle(cur_file_path) and all(split_extension): - sub_lang = split_extension[0].lower() - if sub_lang == 'pt-br': - sub_lang = 'pt-BR' - new_extension = sub_lang + split_extension[1] - changed_extension = True - # If subtitle was downloaded from Medusa it can't be in the torrent folder, so we move it. 
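# Illustrative sketch (not part of the patch): the string handling behind the subtitle
# branch of rename_associated_file() above. The language-code validation done with
# from_code()/from_ietf_code() in the patch is omitted; the helper name and the sample
# filename are hypothetical.
import os

def subtitle_extension(filepath):
    """Return a language-coded extension such as 'pt-BR.srt'."""
    base, ext = os.path.splitext(os.path.basename(filepath))   # ('Show.S01E01.pt_br', '.srt')
    code = base.rsplit('.', 1)[-1].lower().replace('_', '-')
    if code == 'pt-br':   # hardcoded special case, mirroring the TODO in the patch
        code = 'pt-BR'
    return code + ext

print(subtitle_extension('Show.S01E01.pt_br.srt'))   # -> pt-BR.srt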
- # Otherwise when torrent+data gets removed the folder won't be deleted because of subtitle - if app.POSTPONE_IF_NO_SUBS: - # subtitle_action = move - action = subtitle_action or action - - # replace nfo with nfo-orig to avoid conflicts - if extension == 'nfo' and app.NFO_RENAME: - new_extension = 'nfo-orig' - changed_extension = True + for cur_associated_file in file_list: + new_file_path = self.rename_associated_file(new_path, new_base_name, cur_associated_file) - # rename file with new base name - if new_base_name: - new_file_name = new_base_name + '.' + new_extension - else: - # current file name including extension - new_file_name = os.path.basename(cur_file_path) - # if we're not renaming we still need to change the extension sometimes - if changed_extension: - new_file_name = new_file_name.replace(extension, new_extension) - - if app.SUBTITLES_DIR and is_subtitle(cur_file_path): - subs_new_path = os.path.join(new_path, app.SUBTITLES_DIR) - dir_exists = helpers.make_dir(subs_new_path) - if not dir_exists: - logger.log(u'Unable to create subtitles folder {0}'.format(subs_new_path), logger.ERROR) - else: - helpers.chmod_as_parent(subs_new_path) - new_file_path = os.path.join(subs_new_path, new_file_name) - else: - new_file_path = os.path.join(new_path, new_file_name) + # If subtitle was downloaded from Medusa it can't be in the torrent folder, so we move it. + # Otherwise when torrent+data gets removed, the folder won't be deleted because of subtitle + if app.POSTPONE_IF_NO_SUBS and is_subtitle(cur_associated_file): + # subtitle_action = move + action = subtitle_action or action - action(cur_file_path, new_file_path) + action(cur_associated_file, new_file_path) def post_process_action(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False): """ Run the given action on file and set proper permissions. - :param file_path: The full path of the media file - :param new_path: Destination path where we want the file to - :param new_base_name: The base filename (no extension) to use. Use None to keep the same name. + :param file_path: The full path of the file to act on + :param new_path: full show folder path where the file will be moved|copied|linked to + :param new_base_name: The base filename (no extension) to use. 
Use None to keep the same name :param associated_files: Boolean, whether we should run the action in similarly-named files too """ def move(cur_file_path, new_file_path): @@ -436,7 +456,7 @@ def move(cur_file_path, new_file_path): except (IOError, OSError) as e: self._log(u'Unable to move file {0} to {1}: {2!r}'.format (cur_file_path, new_file_path, e), logger.ERROR) - raise + raise EpisodePostProcessingFailedException('Unable to move the files to their new home') def copy(cur_file_path, new_file_path): self._log(u'Copying file from {0} to {1}'.format(cur_file_path, new_file_path), logger.DEBUG) @@ -446,7 +466,7 @@ def copy(cur_file_path, new_file_path): except (IOError, OSError) as e: self._log(u'Unable to copy file {0} to {1}: {2!r}'.format (cur_file_path, new_file_path, e), logger.ERROR) - raise + raise EpisodePostProcessingFailedException('Unable to copy the files to their new home') def hardlink(cur_file_path, new_file_path): self._log(u'Hard linking file from {0} to {1}'.format(cur_file_path, new_file_path), logger.DEBUG) @@ -456,7 +476,7 @@ def hardlink(cur_file_path, new_file_path): except (IOError, OSError) as e: self._log(u'Unable to link file {0} to {1}: {2!r}'.format (cur_file_path, new_file_path, e), logger.ERROR) - raise + raise EpisodePostProcessingFailedException('Unable to hard link the files to their new home') def symlink(cur_file_path, new_file_path): self._log(u'Moving then symbolic linking file from {0} to {1}'.format @@ -467,7 +487,7 @@ def symlink(cur_file_path, new_file_path): except (IOError, OSError) as e: self._log(u'Unable to link file {0} to {1}: {2!r}'.format (cur_file_path, new_file_path, e), logger.ERROR) - raise + raise EpisodePostProcessingFailedException('Unable to move and link the files to their new home') action = {'copy': copy, 'move': move, 'hardlink': hardlink, 'symlink': symlink}.get(self.process_method) # Subtitle action should be move in case of hardlink|symlink as downloaded subtitle is not part of torrent @@ -706,26 +726,33 @@ def _get_ep_obj(self, show, season, episodes): return root_ep - def _get_quality(self, ep_obj): + def _quality_from_status(self, status): + """ + Determine the quality of the file that is being post processed with its status. + + :param status: The status related to the file we are post processing + :return: A quality value found in common.Quality """ - Determine the quality of the file that is being post processed. + quality = common.Quality.UNKNOWN + + if status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: + _, quality = common.Quality.split_composite_status(status) + if quality != common.Quality.UNKNOWN: + self._log(u'The snatched status has a quality in it, using that: {0}'.format + (common.Quality.qualityStrings[quality]), logger.DEBUG) + return quality + + return quality - First by checking if it is directly available in the Episode's status or - otherwise by parsing through the data available. + def _get_quality(self, ep_obj): + """ + Determine the quality of the file that is being post processed with alternative methods. 
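# Illustrative sketch (not part of the patch): what "the snatched status has a quality
# in it" means in _quality_from_status() above. Medusa packs status and quality into a
# single composite integer; the status + 100 * quality scheme below is the classic
# SickBeard-style encoding and is an assumption here, as are the constant values.
SNATCHED = 2    # hypothetical status value
HDTV = 4        # hypothetical quality flag

def composite_status(status, quality):
    return status + 100 * quality

def split_composite_status(composite):
    return composite % 100, composite // 100

assert split_composite_status(composite_status(SNATCHED, HDTV)) == (SNATCHED, HDTV)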
:param ep_obj: The Episode object related to the file we are post processing :return: A quality value found in common.Quality """ ep_quality = common.Quality.UNKNOWN - # Try getting quality from the episode (snatched) status first - if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: - _, ep_quality = common.Quality.split_composite_status(ep_obj.status) - if ep_quality != common.Quality.UNKNOWN: - self._log(u'The snatched status has a quality in it, using that: {0}'.format - (common.Quality.qualityStrings[ep_quality]), logger.DEBUG) - return ep_quality - for resource_name, cur_name in self.item_resources.items(): # Skip names that are falsey @@ -771,7 +798,7 @@ def _priority_from_history(self, show_id, season, episodes, quality): # Second: get the quality of the last snatched epsiode # and compare it to the quality we are post-processing history_result = main_db_con.select( - 'SELECT quality, manually_searched ' + 'SELECT quality, manually_searched, info_hash ' 'FROM history ' 'WHERE showid = ? ' 'AND season = ? ' @@ -789,6 +816,8 @@ def _priority_from_history(self, show_id, season, episodes, quality): # Check if the last snatch was a manual snatch if history_result[0]['manually_searched']: self.manually_searched = True + # Get info hash so we can move torrent if setting is enabled + self.info_hash = history_result[0]['info_hash'] or None download_result = main_db_con.select( 'SELECT resource ' @@ -976,7 +1005,7 @@ def process(self): # retrieve/create the corresponding Episode objects ep_obj = self._get_ep_obj(show, season, episodes) - old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) + _, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) # get the quality of the episode we're processing if quality and common.Quality.qualityStrings[quality] != 'Unknown': @@ -984,16 +1013,19 @@ def process(self): (common.Quality.qualityStrings[quality]), logger.DEBUG) new_ep_quality = quality else: - new_ep_quality = self._get_quality(ep_obj) - - logger.log(u'Quality of the episode we are processing: {0}'.format - (common.Quality.qualityStrings[new_ep_quality]), logger.DEBUG) + new_ep_quality = self._quality_from_status(ep_obj.status) # check snatched history to see if we should set the download as priority self._priority_from_history(show.indexerid, season, episodes, new_ep_quality) if self.in_history: self._log(u'This episode was found in history as SNATCHED.', logger.DEBUG) + if new_ep_quality == common.Quality.UNKNOWN: + new_ep_quality = self._get_quality(ep_obj) + + logger.log(u'Quality of the episode we are processing: {0}'.format + (common.Quality.qualityStrings[new_ep_quality]), logger.DEBUG) + # see if this is a priority download (is it snatched, in history, PROPER, or BEST) priority_download = self._is_priority(old_ep_quality, new_ep_quality) self._log(u'This episode is a priority download: {0}'.format(priority_download), logger.DEBUG) @@ -1004,7 +1036,7 @@ def process(self): new_ep_version = version # check for an existing file - existing_file_status = self._check_for_existing_file(ep_obj.location) + existing_file_status = self._compare_file_size(ep_obj.location) if not priority_download: if existing_file_status == PostProcessor.EXISTS_SAME: @@ -1128,11 +1160,11 @@ def process(self): sql_l.append(cur_ep.get_sql()) # Just want to keep this consistent for failed handling right now - release_name = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name) - if release_name is 
not None: - failed_history.log_success(release_name) + nzb_release_name = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name) + if nzb_release_name is not None: + failed_history.log_success(nzb_release_name) else: - self._log(u"Couldn't determine release name, aborting", logger.WARNING) + self._log(u"Couldn't determine NZB release name, aborting", logger.WARNING) # find the destination folder try: @@ -1146,7 +1178,8 @@ def process(self): self._log(u'Destination folder for this episode: {0}'.format(dest_path), logger.DEBUG) # create any folders we need - helpers.make_dirs(dest_path) + if not helpers.make_dirs(dest_path): + raise EpisodePostProcessingFailedException('Unable to create destination folder to the files') # figure out the base name of the resulting episode file if app.RENAME_EPISODES: @@ -1213,37 +1246,31 @@ def process(self): # log it to history episode and related episodes (multi-episode for example) for cur_ep in [ep_obj] + ep_obj.related_episodes: - history.logDownload(cur_ep, self.file_path, new_ep_quality, self.release_group, new_ep_version) - - # If any notification fails, don't stop post_processor - try: - # send notifications - notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN')) - - # do the library update for KODI - notifiers.kodi_notifier.update_library(ep_obj.show.name) - - # do the library update for Plex - notifiers.plex_notifier.update_library(ep_obj) - - # do the library update for EMBY - notifiers.emby_notifier.update_library(ep_obj.show) - - # do the library update for NMJ - # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers) - - # do the library update for Synology Indexer - notifiers.synoindex_notifier.addFile(ep_obj.location) - - # do the library update for pyTivo - notifiers.pytivo_notifier.update_library(ep_obj) - - # do the library update for Trakt - notifiers.trakt_notifier.update_library(ep_obj) - except Exception as e: - logger.log(u'Some notifications could not be sent. Error: {0!r}. 
' - u'Continuing with post-processing...'.format(e)) + history.log_download(cur_ep, self.file_path, new_ep_quality, self.release_group, new_ep_version) + + # send notifications + notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN')) + # do the library update for KODI + notifiers.kodi_notifier.update_library(ep_obj.show.name) + # do the library update for Plex + notifiers.plex_notifier.update_library(ep_obj) + # do the library update for EMBY + notifiers.emby_notifier.update_library(ep_obj.show) + # do the library update for NMJ + # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers) + # do the library update for Synology Indexer + notifiers.synoindex_notifier.addFile(ep_obj.location) + # do the library update for pyTivo + notifiers.pytivo_notifier.update_library(ep_obj) + # do the library update for Trakt + notifiers.trakt_notifier.update_library(ep_obj) self._run_extra_scripts(ep_obj) + # Store self.info_hash and self.release_name so later we can remove from client if setting is enabled + if self.info_hash: + existing_release_names = app.RECENTLY_POSTPROCESSED.get(self.info_hash, []) + existing_release_names.append(self.release_name) + app.RECENTLY_POSTPROCESSED[self.info_hash] = existing_release_names + return True diff --git a/medusa/process_tv.py b/medusa/process_tv.py index a654c91498..4a86bfba15 100644 --- a/medusa/process_tv.py +++ b/medusa/process_tv.py @@ -1,5 +1,4 @@ # coding=utf-8 -# Author: Nic Wolfe # # This file is part of Medusa. # @@ -16,17 +15,24 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . +from __future__ import unicode_literals + import os import shutil +import socket import stat +from medusa.clients import torrent + +import requests + import shutil_custom + from unrar2 import RarFile from unrar2.rar_exceptions import (ArchiveHeaderBroken, FileOpenError, IncorrectRARPassword, InvalidRARArchive, InvalidRARArchiveUsage) from . import app, db, failed_processor, helpers, logger, notifiers, post_processor -from .helper.common import is_sync_file, is_torrent_or_nzb_file, subtitle_extensions -from .helper.encoding import ss +from .helper.common import is_sync_file, subtitle_extensions from .helper.exceptions import EpisodePostProcessingFailedException, FailedPostProcessingFailedException, ex from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser from .subtitles import accept_any, accept_unknown, get_embedded_subtitles @@ -34,650 +40,645 @@ shutil.copyfile = shutil_custom.copyfile_custom -class ProcessResult(object): # pylint: disable=too-few-public-methods - def __init__(self): - self.result = True - self.output = '' - self.missedfiles = [] - self.aggresult = True +class ProcessResult(object): + IGNORED_FOLDERS = ('.AppleDouble', '.@__thumb', '@eaDir') -def delete_folder(folder, check_empty=True): - """ - Remove a folder from the filesystem. 
+ def __init__(self, path, process_method=None): - :param folder: Path to folder to remove - :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True - :return: True on success, False on failure - """ - # check if it's a folder - if not os.path.isdir(folder): - return False - - # check if it isn't TV_DOWNLOAD_DIR - if app.TV_DOWNLOAD_DIR: - if helpers.real_path(folder) == helpers.real_path(app.TV_DOWNLOAD_DIR): - return False - - # check if it's empty folder when wanted checked - if check_empty: - check_files = os.listdir(folder) - if check_files: - logger.log(u"Not deleting folder %s found the following files: %s" % - (folder, check_files), logger.INFO) - return False + self._output = [] + self.directory = path + self.process_method = process_method + self.resource_name = None + self.result = True + self.succeeded = True + self.missedfiles = [] + self.allowed_extensions = app.ALLOWED_EXTENSIONS.split(',') + self.postponed_no_subs = False + + @property + def directory(self): + """Return the root directory we are going to process.""" + return getattr(self, '_directory') + + @directory.setter + def directory(self, path): + directory = None + if os.path.isdir(path): + self._log('Processing path: {0}'.format(path), logger.DEBUG) + directory = os.path.realpath(path) + + # If the client and the application are not on the same machine, + # translate the directory into a network directory + elif all([app.TV_DOWNLOAD_DIR, os.path.isdir(app.TV_DOWNLOAD_DIR), + os.path.normpath(path) == os.path.normpath(app.TV_DOWNLOAD_DIR)]): + directory = os.path.join( + app.TV_DOWNLOAD_DIR, + os.path.abspath(path).split(os.path.sep)[-1] + ) + self._log('Trying to use folder: {0}'.format(directory), + logger.DEBUG) + else: + self._log("Unable to figure out what folder to process." + " If your download client and Medusa aren't on the same" + " machine, make sure to fill out the Post Processing Dir" + " field in the config.", logger.WARNING) + setattr(self, '_directory', directory) + + @property + def paths(self): + """Return the paths we are going to try to process.""" + if self.directory: + yield self.directory + if self.resource_name: + return + for root, dirs, files in os.walk(self.directory): + del files # unused variable + for folder in dirs: + path = os.path.join(root, folder) + yield path + break + + @property + def video_files(self): + return getattr(self, '_video_files', []) + + @video_files.setter + def video_files(self, value): + setattr(self, '_video_files', value) + + @property + def output(self): + return '\n'.join(self._output) + + def _log(self, message, level=logger.INFO): + logger.log(message, level) + self._output.append(message) + + def process(self, resource_name=None, force=False, is_priority=None, delete_on=False, failed=False, + proc_type='auto', ignore_subs=False): + """ + Scan through the files in the root directory and process whatever media files are found. 
+ + :param resource_name: The resource that will be processed directly + :param force: True to postprocess already postprocessed files + :param is_priority: Boolean for whether or not is a priority download + :param delete_on: Boolean for whether or not it should delete files + :param failed: Boolean for whether or not the download failed + :param proc_type: Type of postprocessing auto or manual + :param ignore_subs: True to ignore setting 'postpone if no subs' + """ + if not self.directory: + return self.output + + if resource_name: + self.resource_name = resource_name + + if app.POSTPONE_IF_NO_SUBS: + self._log("Feature 'postpone post-processing if no subtitle available' is enabled.") + + for path in self.paths: + + if not self.should_process(path, failed): + continue - try: - logger.log(u"Deleting folder (if it's empty): %s" % folder) - os.rmdir(folder) - except (OSError, IOError) as e: - logger.log(u"Warning: unable to delete folder: %s: %s" % (folder, ex(e)), logger.WARNING) - return False - else: - try: - logger.log(u"Deleting folder: " + folder) - shutil.rmtree(folder) - except (OSError, IOError) as e: - logger.log(u"Warning: unable to delete folder: %s: %s" % (folder, ex(e)), logger.WARNING) - return False + self.result = True - return True + for dir_path, filelist in self._get_files(path): + sync_files = (filename + for filename in filelist + if is_sync_file(filename)) + # Don't process files if they are still being synced + postpone = app.POSTPONE_IF_SYNC_FILES and any(sync_files) -def delete_files(processPath, notwantedFiles, result, force=False): - """ - Remove files from filesystem. + if not postpone: - :param processPath: path to process - :param notwantedFiles: files we do not want - :param result: Processor results - :param force: Boolean, force deletion, defaults to false - """ - if not result.result and force: - result.output += logHelper(u"Forcing deletion of files, even though last result was not successful", logger.DEBUG) - elif not result.result: - return + self._log('Processing folder: {0}'.format(dir_path), logger.DEBUG) - # Delete all file not needed - for cur_file in notwantedFiles: + self.prepare_files(dir_path, filelist, force) - cur_file_path = os.path.join(processPath, cur_file) + self.process_files(dir_path, force=force, is_priority=is_priority, + ignore_subs=ignore_subs) - if not os.path.isfile(cur_file_path): - continue # Prevent error when a notwantedfiles is an associated files + # Always delete files if they are being moved or if it's explicitly wanted + if not self.process_method == 'move' or (proc_type == 'manual' and not delete_on): + continue - result.output += logHelper(u"Deleting file: %s" % cur_file, logger.DEBUG) + self.delete_folder(os.path.join(dir_path, '@eaDir')) + if self.unwanted_files: + self.delete_files(dir_path, self.unwanted_files) - # check first the read-only attribute - file_attribute = os.stat(cur_file_path)[0] - if not file_attribute & stat.S_IWRITE: - # File is read-only, so make it writeable - result.output += logHelper(u"Changing ReadOnly Flag for file: %s" % cur_file, logger.DEBUG) - try: - os.chmod(cur_file_path, stat.S_IWRITE) - except OSError as e: - result.output += logHelper(u"Cannot change permissions of %s: %s" % - (cur_file_path, ex(e)), logger.DEBUG) - try: - os.remove(cur_file_path) - except OSError as e: - result.output += logHelper(u"Unable to delete file %s: %s" % (cur_file, e.strerror), logger.DEBUG) + if all([not app.NO_DELETE or proc_type == 'manual', self.process_method == 'move', + os.path.normpath(dir_path) 
!= os.path.normpath(app.TV_DOWNLOAD_DIR)]): + if self.delete_folder(dir_path, check_empty=True): + self._log('Deleted folder: {0}'.format(dir_path), logger.DEBUG) -def logHelper(logMessage, logLevel=logger.INFO): - logger.log(logMessage, logLevel) - return logMessage + u"\n" + else: + self._log('Found temporary sync files in folder: {0}'.format(dir_path)) + self._log('Skipping post processing for folder: {0}'.format(dir_path)) + self.missedfiles.append('{0}: Sync files found'.format(dir_path)) + if self.succeeded: + self._log('Successfully processed.') -#def OneRunPP(): -# isRunning = [False] -# -# def decorate(func): -# @wraps(func) -# def func_wrapper(*args, **kargs): -# if isRunning[0]: -# return logHelper(u'Post processor is already running', logger.WARNING) - -# isRunning[0] = True -# ret = func(*args, **kargs) -# isRunning[0] = False -# return ret -# return func_wrapper -# return decorate - - -# pylint: disable=too-many-arguments,too-many-branches,too-many-statements,too-many-locals -#@OneRunPP() -def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, - delete_on=False, failed=False, proc_type="auto", ignore_subs=False): - """ - Scan through the files in dirName and process whatever media files are found. - - :param dirName: The folder name to look in - :param nzbName: The NZB name which resulted in this folder being downloaded - :param process_method: Process methodo: hardlink, move, softlink, etc. - :param force: True to postprocess already postprocessed files - :param is_priority: Boolean for whether or not is a priority download - :param delete_on: Boolean for whether or not it should delete files - :param failed: Boolean for whether or not the download failed - :param proc_type: Type of postprocessing auto or manual - :param ignore_subs: True to ignore setting 'postpone if no subs' - """ - - result = ProcessResult() - - # if they passed us a real dir then assume it's the one we want - if os.path.isdir(dirName): - dirName = os.path.realpath(dirName) - result.output += logHelper(u"Processing folder %s" % dirName, logger.DEBUG) - - # if the client and the application are not on the same machine translate the directory into a network directory - elif all([app.TV_DOWNLOAD_DIR, - os.path.isdir(app.TV_DOWNLOAD_DIR), - os.path.normpath(dirName) == os.path.normpath(app.TV_DOWNLOAD_DIR)]): - dirName = os.path.join(app.TV_DOWNLOAD_DIR, os.path.abspath(dirName).split(os.path.sep)[-1]) - result.output += logHelper(u"Trying to use folder: %s " % dirName, logger.DEBUG) - - # if we didn't find a real dir then quit - if not os.path.isdir(dirName): - result.output += logHelper(u"Unable to figure out what folder to process. " - u"If your downloader and Medusa aren't on the same PC " - u"make sure you fill out your TV download dir in the config.", - logger.DEBUG) - return result.output - - path, dirs, files = get_path_dir_files(dirName, nzbName, proc_type) - - files = [x for x in files if not is_torrent_or_nzb_file(x)] - SyncFiles = [x for x in files if is_sync_file(x)] - nzbNameOriginal = nzbName - - # Don't post process if files are still being synced and option is activated - postpone = SyncFiles and app.POSTPONE_IF_SYNC_FILES - - # Warn user if 'postpone if no subs' is enabled. 
Will debug possible user issues with PP - if app.POSTPONE_IF_NO_SUBS: - result.output += logHelper(u"Feature 'postpone post-processing if no subtitle available' is enabled", logger.INFO) - - if not postpone: - result.output += logHelper(u"PostProcessing Path: %s" % path, logger.INFO) - result.output += logHelper(u"PostProcessing Dirs: %s" % str(dirs), logger.DEBUG) - - videoFiles = [x for x in files if helpers.is_media_file(x)] - rarFiles = [x for x in files if helpers.is_rar_file(x)] - rarContent = "" - if rarFiles: - rarContent = unRAR(path, rarFiles, force, result) - files += rarContent - videoFiles += [x for x in rarContent if helpers.is_media_file(x)] - videoInRar = [x for x in rarContent if helpers.is_media_file(x)] if rarContent else '' - - result.output += logHelper(u"PostProcessing Files: %s" % files, logger.DEBUG) - result.output += logHelper(u"PostProcessing VideoFiles: %s" % videoFiles, logger.DEBUG) - result.output += logHelper(u"PostProcessing RarContent: %s" % rarContent, logger.DEBUG) - result.output += logHelper(u"PostProcessing VideoInRar: %s" % videoInRar, logger.DEBUG) - - # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten). - nzbName = None if len(videoFiles) >= 2 else nzbName - - process_method = process_method if process_method else app.PROCESS_METHOD - result.result = True + # Clean Kodi library + if app.KODI_LIBRARY_CLEAN_PENDING and notifiers.kodi_notifier.clean_library(): + app.KODI_LIBRARY_CLEAN_PENDING = False - # Don't Link media when the media is extracted from a rar in the same path - if process_method in (u'hardlink', u'symlink') and videoInRar: - process_media(path, videoInRar, nzbName, u'move', force, is_priority, ignore_subs, result) - # As is a hardlink/symlink we can't keep the extracted file in the folder - # Otherwise when torrent+data gets removed the folder won't be deleted because of hanging files - # That's why we don't check for app.DELRARCONTENTS here. - delete_files(path, rarContent, result) - for video in set(videoFiles) - set(videoInRar): - process_media(path, [video], nzbName, process_method, force, is_priority, ignore_subs, result) - elif app.DELRARCONTENTS and videoInRar: - process_media(path, videoInRar, nzbName, process_method, force, is_priority, ignore_subs, result) - delete_files(path, rarContent, result, True) - for video in set(videoFiles) - set(videoInRar): - process_media(path, [video], nzbName, process_method, force, is_priority, ignore_subs, result) + if self.missedfiles: + self._log('I did encounter some unprocessable items: ') + for missedfile in self.missedfiles: + self._log('{0}'.format(missedfile)) else: - for video in videoFiles: - process_media(path, [video], nzbName, process_method, force, is_priority, ignore_subs, result) + self._log('Problem(s) during processing, failed for the following files/folders: ', logger.WARNING) + for missedfile in self.missedfiles: + self._log('{0}'.format(missedfile), logger.WARNING) + + if app.USE_TORRENTS and app.PROCESS_METHOD in ('hardlink', 'symlink') and app.TORRENT_SEED_LOCATION: + to_remove_hashes = app.RECENTLY_POSTPROCESSED.items() + for info_hash, release_names in to_remove_hashes: + if self.move_torrent_seeding_folder(info_hash, release_names): + app.RECENTLY_POSTPROCESSED.pop(info_hash) + + return self.output + + def should_process(self, path, failed=False): + """ + Determine if a directory should be processed. 
+ + :param path: Path we want to verify + :param failed: (optional) Mark the directory as failed + :return: True if the directory is valid for processing, otherwise False + :rtype: Boolean + """ + folder = os.path.basename(path) + if folder in self.IGNORED_FOLDERS: + return False - else: - result.output += logHelper(u"Found temporary sync files: %s in path: %s" % (SyncFiles, path)) - result.output += logHelper(u"Skipping post processing for folder: %s" % path) - result.missedfiles.append(u"%s : Syncfiles found" % path) + if folder.startswith('_FAILED_'): + self._log('The directory name indicates it failed to extract.', logger.DEBUG) + failed = True + elif folder.startswith('_UNDERSIZED_'): + self._log('The directory name indicates that it was previously rejected for being undersized.', + logger.DEBUG) + failed = True + elif folder.upper().startswith('_UNPACK'): + self._log('The directory name indicates that this release is in the process of being unpacked.', + logger.DEBUG) + self.missedfiles.append('{0}: Being unpacked'.format(folder)) + return False - # Process Video File in all TV Subdir - for curDir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed, result)]: - result.result = True + if failed: + self.process_failed(path) + self.missedfiles.append('{0}: Failed download'.format(folder)) + return False - for processPath, _, fileList in os.walk(os.path.join(path, curDir), topdown=False): + if helpers.is_hidden_folder(path): + self._log('Ignoring hidden folder: {0}'.format(folder), logger.DEBUG) + self.missedfiles.append('{0}: Hidden folder'.format(folder)) + return False - if not validateDir(path, processPath, nzbNameOriginal, failed, result): - continue + for root, dirs, files in os.walk(path): + for each_file in files: + if helpers.is_media_file(each_file) or helpers.is_rar_file(each_file): + return True + del root # unused variable + del dirs # unused variable - SyncFiles = [x for x in fileList if is_sync_file(x)] - - # Don't post process if files are still being synced and option is activated - postpone = SyncFiles and app.POSTPONE_IF_SYNC_FILES - - if not postpone: - videoFiles = [x for x in fileList if helpers.is_media_file(x)] - rarFiles = [x for x in fileList if helpers.is_rar_file(x)] - rarContent = "" - if rarFiles: - rarContent = unRAR(processPath, rarFiles, force, result) - fileList = set(fileList + rarContent) - videoFiles += [x for x in rarContent if helpers.is_media_file(x)] - - videoInRar = [x for x in rarContent if helpers.is_media_file(x)] if rarContent else '' - - # Don't Link media when the media is extracted from a rar in the same path - if process_method in (u'hardlink', u'symlink') and videoInRar: - process_media(processPath, videoInRar, nzbName, u'move', force, is_priority, ignore_subs, result) - process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, - is_priority, ignore_subs, result) - # As is a hardlink/symlink we can't keep the extracted file in the folder - # Otherwise when torrent+data gets removed the folder won't be deleted because of hanging files - # That's why we don't check for app.DELRARCONTENTS here. 
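# Illustrative sketch (not part of the patch): the folder-name screening that
# should_process() applies above, as a standalone predicate. The prefix strings and
# IGNORED_FOLDERS come from the method; the function name is hypothetical, and the
# media/RAR content check that should_process() performs afterwards is left out.
import os

IGNORED_FOLDERS = ('.AppleDouble', '.@__thumb', '@eaDir')

def folder_looks_processable(path):
    folder = os.path.basename(path)
    if folder in IGNORED_FOLDERS:
        return False
    if folder.upper().startswith('_UNPACK'):
        return False    # release is still being unpacked
    if folder.startswith(('_FAILED_', '_UNDERSIZED_')):
        return False    # handled as a failed download instead
    return True

print(folder_looks_processable('/downloads/_UNPACK_Show.S01E01'))   # -> False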
- delete_files(processPath, rarContent, result) - elif app.DELRARCONTENTS and videoInRar: - process_media(processPath, videoInRar, nzbName, process_method, force, is_priority, ignore_subs, result) - process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, - is_priority, ignore_subs, result) - delete_files(processPath, rarContent, result, True) - else: - process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, ignore_subs, result) - - # Delete all file not needed and avoid deleting files if Manual PostProcessing - if not(process_method == u"move" and result.result) or (proc_type == u"manual" and not delete_on): - continue + self._log('No processable items found in folder: {0}'.format(path), logger.DEBUG) + return False - delete_folder(os.path.join(processPath, u'@eaDir')) - allowed_extensions = app.ALLOWED_EXTENSIONS.split(',') - notwantedFiles = [x for x in fileList - if x not in videoFiles and helpers.get_extension(x) not in allowed_extensions] - if notwantedFiles: - result.output += logHelper(u"Found unwanted files: %s" % notwantedFiles, logger.DEBUG) - delete_files(processPath, notwantedFiles, result) + def _get_files(self, path): + """Return the path to a folder and its contents as a tuple.""" + # If resource_name is a file and not an NZB, process it directly + if self.resource_name and (not self.resource_name.endswith('.nzb') and + os.path.isfile(os.path.join(path, self.resource_name))): + yield path, [self.resource_name] + else: + topdown = True if self.directory == path else False + for root, dirs, files in os.walk(path, topdown=topdown): + if files: + yield root, files + if topdown: + break + del dirs # unused variable + + def prepare_files(self, path, files, force=False): + """Prepare files for post-processing.""" + video_files = [] + rar_files = [] + for each_file in files: + if helpers.is_media_file(each_file): + video_files.append(each_file) + elif helpers.is_rar_file(each_file): + rar_files.append(each_file) + + rar_content = [] + video_in_rar = [] + if rar_files: + rar_content = self.unrar(path, rar_files, force) + files.extend(rar_content) + video_in_rar = [each_file for each_file in rar_content if helpers.is_media_file(each_file)] + video_files.extend(video_in_rar) + + self._log('Post-processing files: {0}'.format(files), logger.DEBUG) + self._log('Post-processing video files: {0}'.format(video_files), logger.DEBUG) + + if rar_content: + self._log('Post-processing rar content: {0}'.format(rar_content), logger.DEBUG) + self._log('Post-processing video in rar: {0}'.format(video_in_rar), logger.DEBUG) + + unwanted_files = [filename + for filename in files + if filename not in video_files and + helpers.get_extension(filename) not in + self.allowed_extensions] + if unwanted_files: + self._log('Found unwanted files: {0}'.format(unwanted_files), logger.DEBUG) + + self.video_files = video_files + self.rar_content = rar_content + self.video_in_rar = video_in_rar + self.unwanted_files = unwanted_files + + def process_files(self, path, force=False, is_priority=None, ignore_subs=False): + """Post-process and delete the files in a given path.""" + # TODO: Replace this with something that works for multiple video files + if self.resource_name and len(self.video_files) > 1: + self.resource_name = None - if all([not app.NO_DELETE or proc_type == u"manual", - process_method == u"move", - os.path.normpath(processPath) != os.path.normpath(app.TV_DOWNLOAD_DIR)]): + # Don't Link media when the media is extracted from a rar in the 
same path + if self.process_method in ('hardlink', 'symlink') and self.video_in_rar: + self.process_media(path, self.video_in_rar, force, is_priority, ignore_subs) - if delete_folder(processPath, check_empty=True): - result.output += logHelper(u"Deleted folder: %s" % processPath, logger.DEBUG) + self.process_media(path, set(self.video_files) - set(self.video_in_rar), force, + is_priority, ignore_subs) + if not self.postponed_no_subs: + self.delete_files(path, self.rar_content) else: - result.output += logHelper(u"Found temporary sync files: %s in path: %s" % (SyncFiles, processPath)) - result.output += logHelper(u"Skipping post processing for folder: %s" % processPath) - result.missedfiles.append(u"%s : Syncfiles found" % path) - - if result.aggresult: - result.output += logHelper(u"Successfully processed") - - # Clean library from KODI after PP ended - if app.KODI_LIBRARY_CLEAN_PENDING and notifiers.kodi_notifier.clean_library(): - app.KODI_LIBRARY_CLEAN_PENDING = False - - if result.missedfiles: - result.output += logHelper(u"I did encounter some unprocessable items: ") - for missedfile in result.missedfiles: - result.output += logHelper(u"[%s]" % missedfile) - else: - result.output += logHelper(u"Problem(s) during processing, failed the following files/folders: ", logger.WARNING) - for missedfile in result.missedfiles: - result.output += logHelper(u"[%s]" % missedfile, logger.WARNING) - - return result.output - - -def validateDir(path, dirName, nzbNameOriginal, failed, result): - """ - Check if directory is valid for processing. - - :param path: Path to use - :param dirName: Directory to check - :param nzbNameOriginal: Original NZB name - :param failed: Previously failed objects - :param result: Previous results - :return: True if dir is valid for processing, False if not - """ - dirName = ss(dirName) - - IGNORED_FOLDERS = [u'.AppleDouble', u'.@__thumb', u'@eaDir'] - folder_name = os.path.basename(dirName) - if folder_name in IGNORED_FOLDERS: - return False - - result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG) - - if folder_name.startswith(u'_FAILED_'): - result.output += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG) - failed = True - elif folder_name.startswith(u'_UNDERSIZED_'): - result.output += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG) - failed = True - elif folder_name.upper().startswith(u'_UNPACK'): - result.output += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG) - result.missedfiles.append(u"%s : Being unpacked" % dirName) - return False + self.postponed_no_subs = False - if failed: - process_failed(os.path.join(path, dirName), nzbNameOriginal, result) - result.missedfiles.append(u"%s : Failed download" % dirName) - return False - - if helpers.is_hidden_folder(os.path.join(path, dirName)): - result.output += logHelper(u"Ignoring hidden folder: %s" % dirName, logger.DEBUG) - result.missedfiles.append(u"%s : Hidden folder" % dirName) - return False + elif app.DELRARCONTENTS and self.video_in_rar: + self.process_media(path, self.video_in_rar, force, is_priority, ignore_subs) - # make sure the dir isn't inside a show dir - main_db_con = db.DBConnection() - sql_results = main_db_con.select("SELECT location FROM tv_shows") + self.process_media(path, set(self.video_files) - set(self.video_in_rar), + force, is_priority, ignore_subs) - for sqlShow in sql_results: - if 
dirName.lower().startswith(os.path.realpath(sqlShow["location"]).lower() + os.sep) or \ - dirName.lower() == os.path.realpath(sqlShow["location"]).lower(): + if not self.postponed_no_subs: + self.delete_files(path, self.rar_content, force=True) + else: + self.postponed_no_subs = False - result.output += logHelper( - u"Cannot process an episode that's already been moved to its show dir, skipping " + dirName, - logger.WARNING) + else: + self.process_media(path, self.video_files, force, is_priority, ignore_subs) + self.postponed_no_subs = False + + @staticmethod + def delete_folder(folder, check_empty=True): + """ + Remove a folder from the filesystem. + + :param folder: Path to folder to remove + :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True + :return: True on success, False on failure + """ + # check if it's a folder + if not os.path.isdir(folder): return False - # Get the videofile list for the next checks - allFiles = [] - allDirs = [] - for _, processdir, fileList in os.walk(os.path.join(path, dirName), topdown=False): - allDirs += processdir - allFiles += fileList - - videoFiles = [x for x in allFiles if helpers.is_media_file(x)] - allDirs.append(dirName) - - # check if the dir have at least one tv video file - for video in videoFiles: - try: - NameParser().parse(video, cache_result=False) - return True - except (InvalidNameException, InvalidShowException) as error: - logger.log(u'{0}'.format(error), logger.DEBUG) - - for proc_dir in allDirs: - try: - NameParser().parse(proc_dir, cache_result=False) - return True - except (InvalidNameException, InvalidShowException) as error: - logger.log(u'{0}'.format(error), logger.DEBUG) + # check if it isn't TV_DOWNLOAD_DIR + if app.TV_DOWNLOAD_DIR: + if helpers.real_path(folder) == helpers.real_path(app.TV_DOWNLOAD_DIR): + return False - if app.UNPACK: - # Search for packed release - packedFiles = [x for x in allFiles if helpers.is_rar_file(x)] + # check if it's empty folder when wanted checked + if check_empty: + check_files = os.listdir(folder) + if check_files: + logger.log('Not deleting folder {0} found the following files: {1}'.format + (folder, check_files), logger.INFO) + return False - for packed in packedFiles: try: - NameParser().parse(packed, cache_result=False) - return True - except (InvalidNameException, InvalidShowException) as error: - logger.log(u'{0}'.format(error), logger.DEBUG) - - result.output += logHelper(u"%s : No processable items found in the folder" % dirName, logger.DEBUG) - return False - - -def unRAR(path, rarFiles, force, result): - """ - Extract RAR files. 
- - :param path: Path to look for files in - :param rarFiles: Names of RAR files - :param force: process currently processing items - :param result: Previous results - :return: List of unpacked file names - """ - unpacked_files = [] - - if app.UNPACK and rarFiles: - - result.output += logHelper(u"Packed releases detected: %s" % rarFiles, logger.DEBUG) - - for archive in rarFiles: + logger.log("Deleting folder (if it's empty): {0}".format(folder)) + os.rmdir(folder) + except (OSError, IOError) as error: + logger.log('Unable to delete folder: {0}: {1}'.format(folder, ex(error)), logger.WARNING) + return False + else: + try: + logger.log('Deleting folder: {0}'.format(folder)) + shutil.rmtree(folder) + except (OSError, IOError) as error: + logger.log('Unable to delete folder: {0}: {1}'.format(folder, ex(error)), logger.WARNING) + return False - result.output += logHelper(u"Unpacking archive: %s" % archive, logger.DEBUG) + return True - failure = None + def delete_files(self, path, files, force=False): + """ + Remove files from filesystem. + + :param path: path to process + :param files: files we want to delete + :param result: Processor results + :param force: Boolean, force deletion, defaults to false + """ + if not files: + return + + if not self.result and force: + self._log('Forcing deletion of files, even though last result was not successful.', logger.DEBUG) + elif not self.result: + return + + # Delete all file not needed + for cur_file in files: + + cur_file_path = os.path.join(path, cur_file) + + if not os.path.isfile(cur_file_path): + continue # Prevent error when a notwantedfiles is an associated files + + self._log('Deleting file: {0}'.format(cur_file), logger.DEBUG) + + # check first the read-only attribute + file_attribute = os.stat(cur_file_path)[0] + if not file_attribute & stat.S_IWRITE: + # File is read-only, so make it writeable + self._log('Changing read-only flag for file: {0}'.format(cur_file), logger.DEBUG) + try: + os.chmod(cur_file_path, stat.S_IWRITE) + except OSError as error: + self._log('Cannot change permissions of {0}: {1}'.format(cur_file_path, ex(error)), logger.DEBUG) try: - rar_handle = RarFile(os.path.join(path, archive)) - - # Skip extraction if any file in archive has previously been extracted - skip_file = False - for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]: - if already_postprocessed(path, file_in_archive, force, result): - result.output += logHelper(u"Archive file already post-processed, extraction skipped: %s" % - file_in_archive, logger.DEBUG) - skip_file = True - break - if app.POSTPONE_IF_NO_SUBS and os.path.isfile(os.path.join(path, file_in_archive)): - result.output += logHelper(u"Archive file already extracted, extraction skipped: %s" % - file_in_archive, logger.DEBUG) - - skip_file = True - # We need to return the media file inside the .RAR so we can move - # when method is hardlink/symlink - unpacked_files.append(file_in_archive) - break - if skip_file: - continue + os.remove(cur_file_path) + except OSError as error: + self._log('Unable to delete file {0}: {1}'.format(cur_file, ex(error)), logger.DEBUG) + + def unrar(self, path, rar_files, force=False): + """ + Extract RAR files. 
+ + :param path: Path to look for files in + :param rarFiles: Names of RAR files + :param force: process currently processing items + :param result: Previous results + :return: List of unpacked file names + """ + unpacked_files = [] + + if app.UNPACK and rar_files: + + self._log('Packed files detected: {0}'.format(rar_files), logger.DEBUG) + + for archive in rar_files: + + self._log('Unpacking archive: {0}'.format(archive), logger.DEBUG) + + failure = None + try: + rar_handle = RarFile(os.path.join(path, archive)) + + # Skip extraction if any file in archive has previously been extracted + skip_file = False + for file_in_archive in [os.path.basename(each.filename) + for each in rar_handle.infolist() + if not each.isdir]: + if not force and self.already_postprocessed(file_in_archive): + self._log('Archive file already post-processed, extraction skipped: {0}'.format + (file_in_archive), logger.DEBUG) + skip_file = True + break + + if app.POSTPONE_IF_NO_SUBS and os.path.isfile(os.path.join(path, file_in_archive)): + self._log('Archive file already extracted, extraction skipped: {0}'.format + (file_in_archive), logger.DEBUG) + skip_file = True + # We need to return the media file inside the rar so we can + # move it when the method is hardlink/symlink + unpacked_files.append(file_in_archive) + break + + if skip_file: + continue - rar_handle.extract(path=path, withSubpath=False, overwrite=False) - for x in rar_handle.infolist(): - if not x.isdir: - basename = os.path.basename(x.filename) - if basename not in unpacked_files: - unpacked_files.append(basename) - del rar_handle - - except ArchiveHeaderBroken: - failure = (u'Archive Header Broken', u'Unpacking failed because the Archive Header is Broken') - except IncorrectRARPassword: - failure = (u'Incorrect RAR Password', u'Unpacking failed because of an Incorrect Rar Password') - except FileOpenError: - failure = (u'File Open Error, check the parent folder and destination file permissions.', - u'Unpacking failed with a File Open Error (file permissions?)') - except InvalidRARArchiveUsage: - failure = (u'Invalid Rar Archive Usage', u'Unpacking Failed with Invalid Rar Archive Usage') - except InvalidRARArchive: - failure = (u'Invalid Rar Archive', u'Unpacking Failed with an Invalid Rar Archive Error') - except Exception as e: - failure = (ex(e), u'Unpacking failed for an unknown reason') - - if failure is not None: - result.output += logHelper(u'Failed Unrar archive {}: {}'.format(archive, failure[0]), logger.WARNING) - result.missedfiles.append(u'{} : Unpacking failed: {}'.format(archive, failure[1])) - result.result = False - continue + rar_handle.extract(path=path, withSubpath=False, overwrite=False) + for each in rar_handle.infolist(): + if not each.isdir: + basename = os.path.basename(each.filename) + if basename not in unpacked_files: + unpacked_files.append(basename) + del rar_handle + + except ArchiveHeaderBroken: + failure = ('Archive Header Broken', 'Unpacking failed because the Archive Header is Broken') + except IncorrectRARPassword: + failure = ('Incorrect RAR Password', 'Unpacking failed because of an Incorrect Rar Password') + except FileOpenError: + failure = ('File Open Error, check the parent folder and destination file permissions.', + 'Unpacking failed with a File Open Error (file permissions?)') + except InvalidRARArchiveUsage: + failure = ('Invalid Rar Archive Usage', 'Unpacking Failed with Invalid Rar Archive Usage') + except InvalidRARArchive: + failure = ('Invalid Rar Archive', 'Unpacking Failed with an Invalid Rar Archive 
Error')
+            except Exception as error:
+                failure = (ex(error), 'Unpacking failed for an unknown reason')
+
+            if failure is not None:
+                self._log('Failed Unrar archive {0}: {1}'.format(archive, failure[0]), logger.WARNING)
+                self.missedfiles.append('{0}: Unpacking failed: {1}'.format(archive, failure[1]))
+                self.result = False
+                continue
-            result.output += logHelper(u"UnRar content: %s" % unpacked_files, logger.DEBUG)
+            self._log('Unrar content: {0}'.format(unpacked_files), logger.DEBUG)
-    return unpacked_files
+        return unpacked_files
+    def already_postprocessed(self, video_file):
+        """
+        Check if we have already post-processed a file.
-def already_postprocessed(dir_name, video_file, force, result):
-    """
-    Check if we already post processed a file.
+        :param video_file: File name
+        :return: True if the file has already been post-processed
+        """
+        main_db_con = db.DBConnection()
+        history_result = main_db_con.select(
+            'SELECT * FROM history '
+            "WHERE action LIKE '%04' "
+            'AND resource LIKE ?',
+            ['%' + video_file])
-    :param dir_name: Directory a file resides in
-    :param video_file: File name
-    :param force: Force checking when already checking (currently unused)
-    :param result: True if file is already postprocessed, False if not
-    :return:
-    """
-    if force:
-        return False
+        if history_result:
+            self._log("You're trying to post-process a file that has already "
+                      "been processed, skipping: {0}".format(video_file), logger.DEBUG)
+            return True
-    main_db_con = db.DBConnection()
-    history_result = main_db_con.select(
-        'SELECT * FROM history '
-        "WHERE action LIKE '%04' "
-        'AND resource LIKE ?',
-        ['%' + video_file])
+    def process_media(self, path, video_files, force=False, is_priority=None, ignore_subs=False):
+        """
+        Post-process media files.
+
+        :param path: Path to post-process in
+        :param video_files: Filenames to look for and post-process
+        :param force: Post-process even if the file was already processed
+        :param is_priority: Boolean, is this a priority download
+        :param ignore_subs: True to ignore setting 'postpone if no subs'
+        """
+        processor = None
+        for video_file in video_files:
+            file_path = os.path.join(path, video_file)
-    if history_result:
-        result.output += logHelper(u"You're trying to post-process a file that has already "
-                                   u"been processed, skipping: {0}".format(video_file), logger.DEBUG)
-        return True
+            if not force and self.already_postprocessed(video_file):
+                self._log('Skipping already processed file: {0}'.format(video_file), logger.DEBUG)
+                self._log('Skipping already processed directory: {0}'.format(path), logger.DEBUG)
+                continue
-    return False
-
-
-def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, ignore_subs, result):
-    """
-    Postprocess mediafiles.
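The already_postprocessed() check above is a lookup against Medusa's history table; the query below is a hedged sqlite3 sketch of the same idea, where the database path and function name are placeholders and only the table, column, and LIKE patterns are taken from the diff.

import sqlite3

def was_postprocessed(db_path, video_file):
    """Return True if the history table already records this file as downloaded."""
    with sqlite3.connect(db_path) as conn:
        rows = conn.execute(
            "SELECT 1 FROM history "
            "WHERE action LIKE '%04' "  # '%04' matches actions whose status part is DOWNLOADED
            "AND resource LIKE ?",
            ('%' + video_file,),
        ).fetchall()
    return bool(rows)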
- - :param processPath: Path to postprocess in - :param videoFiles: Filenames to look for and postprocess - :param nzbName: Name of NZB file related - :param process_method: auto/manual - :param force: Postprocess currently postprocessing file - :param is_priority: Boolean, is this a priority download - :param result: Previous results - :param ignore_subs: True to ignore setting 'postpone if no subs' - """ - processor = None - for cur_video_file in videoFiles: - cur_video_file_path = os.path.join(processPath, cur_video_file) - - if already_postprocessed(processPath, cur_video_file, force, result): - result.output += logHelper(u"Skipping already processed file: %s" % cur_video_file, logger.DEBUG) - result.output += logHelper(u"Skipping already processed dir: %s" % processPath, logger.DEBUG) - continue - - try: - processor = post_processor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority) - - # This feature prevents PP for files that do not have subtitle associated with the video file - if app.POSTPONE_IF_NO_SUBS: - if not ignore_subs: - if subtitles_enabled(cur_video_file_path, nzbName): - embedded_subs = set() if app.IGNORE_EMBEDDED_SUBS else get_embedded_subtitles(cur_video_file_path) - - # If user don't want to ignore embedded subtitles and video has at least one, don't post pone PP - if accept_unknown(embedded_subs): - result.output += logHelper(u"Found embedded unknown subtitles and we don't want to ignore them. " - u"Continuing the post-process of this file: %s" % cur_video_file) - elif accept_any(embedded_subs): - result.output += logHelper(u"Found wanted embedded subtitles. " - u"Continuing the post-process of this file: %s" % cur_video_file) - else: - associated_files = processor.list_associated_files(cur_video_file_path, subtitles_only=True) - if not [f for f in associated_files if helpers.get_extension(f) in subtitle_extensions]: - result.output += logHelper(u"No subtitles associated. Postponing the post-process of this file:" - u" %s" % cur_video_file, logger.DEBUG) - continue + try: + processor = post_processor.PostProcessor(file_path, self.resource_name, + self.process_method, is_priority) + + if app.POSTPONE_IF_NO_SUBS: + if not ignore_subs: + if self.subtitles_enabled(file_path, self.resource_name): + embedded_subs = set() if app.IGNORE_EMBEDDED_SUBS else get_embedded_subtitles(file_path) + + # We want to ignore embedded subtitles and video has at least one + if accept_unknown(embedded_subs): + self._log("Found embedded unknown subtitles and we don't want to ignore them. " + "Continuing the post-processing of this file: {0}".format(video_file)) + elif accept_any(embedded_subs): + self._log('Found wanted embedded subtitles. ' + 'Continuing the post-processing of this file: {0}'.format(video_file)) else: - result.output += logHelper(u"Found subtitles associated. " - u"Continuing the post-process of this file: %s" % cur_video_file) + associated_files = processor.list_associated_files(file_path, subtitles_only=True) + if not [filename + for filename in associated_files + if helpers.get_extension(filename) + in subtitle_extensions]: + self._log('No subtitles associated. Postponing the post-process of this file: ' + '{0}'.format(video_file), logger.DEBUG) + self.postponed_no_subs = True + continue + else: + self._log('Found subtitles associated. ' + 'Continuing the post-process of this file: {0}'.format(video_file)) + else: + self._log('Subtitles disabled for this show. 
' + 'Continuing the post-process of this file: {0}'.format(video_file)) else: - result.output += logHelper(u"Subtitles disabled for this show. " - u"Continuing the post-process of this file: %s" % cur_video_file) - else: - result.output += logHelper(u"Subtitles check was disabled for this episode in Manual PP. " - u"Continuing the post-process of this file: %s" % cur_video_file) + self._log('Subtitles check was disabled for this episode in manual post-processing. ' + 'Continuing the post-process of this file: {0}'.format(video_file)) - result.result = processor.process() - process_fail_message = u"" - except EpisodePostProcessingFailedException as e: - result.result = False - process_fail_message = ex(e) + self.result = processor.process() + process_fail_message = '' + except EpisodePostProcessingFailedException as error: + self.result = False + process_fail_message = ex(error) - if processor: - result.output += processor.log + if processor: + self._output.append(processor.log) - if result.result: - result.output += logHelper(u"Processing succeeded for %s" % cur_video_file_path) - else: - result.output += logHelper(u"Processing failed for %s: %s" % (cur_video_file_path, process_fail_message), logger.WARNING) - result.missedfiles.append(u"%s : Processing failed: %s" % (cur_video_file_path, process_fail_message)) - result.aggresult = False - - -def get_path_dir_files(dirName, nzbName, proc_type): - """ - Get files in a path - - :param dirName: Directory to start in - :param nzbName: NZB file, if present - :param proc_type: auto/manual - :return: a tuple of (path,dirs,files) - """ - path = u"" - dirs = [] - files = [] - - if dirName == app.TV_DOWNLOAD_DIR and not nzbName or proc_type == u"manual": # Scheduled Post Processing Active - # Get at first all the subdir in the dirName - for path, dirs, files in os.walk(dirName): - break - else: - path, dirs = os.path.split(dirName) # Script Post Processing - if not (nzbName is None or nzbName.endswith(u'.nzb')) and os.path.isfile(os.path.join(dirName, nzbName)): # For single torrent file without Dir - dirs = [] - files = [os.path.join(dirName, nzbName)] - else: - dirs = [dirs] - files = [] - - return path, dirs, files - - -def process_failed(dirName, nzbName, result): - """Process a download that did not complete correctly.""" - if app.USE_FAILED_DOWNLOADS: - processor = None + if self.result: + self._log('Processing succeeded for {0}'.format(file_path)) + else: + self._log('Processing failed for {0}: {1}'.format(file_path, process_fail_message), logger.WARNING) + self.missedfiles.append('{0}: Processing failed: {1}'.format(file_path, process_fail_message)) + self.succeeded = False - try: - processor = failed_processor.FailedProcessor(dirName, nzbName) - result.result = processor.process() - process_fail_message = u"" - except FailedPostProcessingFailedException as e: - result.result = False - process_fail_message = ex(e) + def process_failed(self, path): + """Process a download that did not complete correctly.""" + if app.USE_FAILED_DOWNLOADS: + processor = None - if processor: - result.output += processor.log + try: + processor = failed_processor.FailedProcessor(path, self.resource_name) + self.result = processor.process() + process_fail_message = '' + except FailedPostProcessingFailedException as error: + self.result = False + process_fail_message = ex(error) + + if processor: + self._output.append(processor.log) + + if app.DELETE_FAILED and self.result: + if self.delete_folder(path, check_empty=False): + self._log('Deleted folder: 
{0}'.format(path), logger.DEBUG) + + if self.result: + self._log('Failed Download Processing succeeded: {0}, {1}'.format + (self.resource_name, path)) + else: + self._log('Failed Download Processing failed: {0}, {1}: {2}'.format + (self.resource_name, path, process_fail_message), logger.WARNING) + + @staticmethod + def subtitles_enabled(*args): + """Try to parse names to a show and check whether the show has subtitles enabled. + + :param args: + :return: + :rtype: bool + """ + for name in args: + if not name: + continue - if app.DELETE_FAILED and result.result: - if delete_folder(dirName, check_empty=False): - result.output += logHelper(u"Deleted folder: %s" % dirName, logger.DEBUG) + try: + parse_result = NameParser().parse(name, cache_result=True) + if parse_result.show.indexerid: + main_db_con = db.DBConnection() + sql_results = main_db_con.select("SELECT subtitles FROM tv_shows WHERE indexer_id = ? LIMIT 1", + [parse_result.show.indexerid]) + return bool(sql_results[0][b'subtitles']) if sql_results else False + + logger.log('Empty indexer ID for: {name}'.format(name=name), logger.WARNING) + except (InvalidNameException, InvalidShowException): + logger.log('Not enough information to parse filename into a valid show. Consider adding scene ' + 'exceptions or improve naming for: {name}'.format(name=name), logger.WARNING) + return False - if result.result: - result.output += logHelper(u"Failed Download Processing succeeded: (%s, %s)" % (nzbName, dirName)) + @staticmethod + def move_torrent_seeding_folder(info_hash, release_names): + """Move torrent to a given seeding folder after PP.""" + if not os.path.isdir(app.TORRENT_SEED_LOCATION): + logger.log('Not possible to move torrent after Post-Processor because seed location is invalid', + logger.WARNING) + return False else: - result.output += logHelper(u"Failed Download Processing failed: (%s, %s): %s" % - (nzbName, dirName, process_fail_message), logger.WARNING) - - -def subtitles_enabled(*args): - """Try to parse names to a show and check whether the show has subtitles enabled. - - :param args: - :return: - :rtype: bool - """ - for name in args: - if not name: - continue - - try: - parse_result = NameParser().parse(name, cache_result=True) - if parse_result.show.indexerid: - main_db_con = db.DBConnection() - sql_results = main_db_con.select("SELECT subtitles FROM tv_shows WHERE indexer_id = ? LIMIT 1", - [parse_result.show.indexerid]) - return bool(sql_results[0]["subtitles"]) if sql_results else False - - logger.log(u'Empty indexer ID for: {name}'.format(name=name), logger.WARNING) - except (InvalidNameException, InvalidShowException): - logger.log(u'Not enough information to parse filename into a valid show. Consider adding scene exceptions ' - u'or improve naming for: {name}'.format(name=name), logger.WARNING) - return False + if release_names: + # Log 'release' or 'releases' + s = 's' if len(release_names) > 1 else '' + release_names = ', '.join(release_names) + else: + s = '' + release_names = 'N/A' + logger.log('Trying to move torrent after Post-Processor', logger.DEBUG) + torrent_moved = False + client = torrent.get_client_class(app.TORRENT_METHOD)() + try: + torrent_moved = client.move_torrent(info_hash) + except (requests.exceptions.RequestException, socket.gaierror) as e: + logger.log("Could't connect to client to move torrent for release{s} '{release}' with hash: {hash} " + "to: '{path}'. 
Error: {error}".format + (release=release_names, hash=info_hash, error=e.message, path=app.TORRENT_SEED_LOCATION, s=s), + logger.WARNING) + return False + except AttributeError: + logger.log("Your client doesn't support moving torrents to new location", logger.WARNING) + return True + if torrent_moved: + logger.log("Moved torrent for release{s} '{release}' with hash: {hash} to: '{path}'".format + (release=release_names, hash=info_hash, path=app.TORRENT_SEED_LOCATION, s=s), + logger.WARNING) + return True + else: + logger.log("Could not move torrent for release{s} '{release}' with hash: {hash} to: '{path}'. " + "Please check logs.".format(release=release_names, hash=info_hash, s=s, + path=app.TORRENT_SEED_LOCATION), logger.WARNING) + return False diff --git a/medusa/providers/__init__.py b/medusa/providers/__init__.py index fd3838706e..2b32df8c31 100644 --- a/medusa/providers/__init__.py +++ b/medusa/providers/__init__.py @@ -24,7 +24,7 @@ anizb, binsearch, ) from .torrent import ( - abnormal, alpharatio, animebytes, animetorrents, bitcannon, bithdtv, bitsnoop, btn, cpasbien, danishbits, + abnormal, alpharatio, animebytes, animetorrents, bitcannon, bithdtv, btn, cpasbien, danishbits, elitetorrent, extratorrent, freshontv, gftracker, hd4free, hdbits, hdspace, hdtorrents, hounddawgs, iptorrents, limetorrents, morethantv, newpct, norbits, nyaatorrents, pretome, rarbg, scc, scenetime, sdbits, shazbat, speedcd, t411, thepiratebay, tntvillage, tokyotoshokan, torrentbytes, torrentday, torrentleech, torrentproject, @@ -37,7 +37,7 @@ 'speedcd', 'nyaatorrents', 'torrentbytes', 'freshontv', 'cpasbien', 'morethantv', 't411', 'tokyotoshokan', 'alpharatio', 'sdbits', 'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'xthor', 'abnormal', 'scenetime', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz2', 'pretome', 'gftracker', - 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents', 'norbits', 'anizb', + 'hdspace', 'newpct', 'elitetorrent', 'danishbits', 'hd4free', 'limetorrents', 'norbits', 'anizb', 'bithdtv', 'zooqle', 'animebytes', 'animetorrents' ] diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py index eb5f001598..6ffedfc2db 100644 --- a/medusa/providers/generic_provider.py +++ b/medusa/providers/generic_provider.py @@ -60,7 +60,6 @@ ) from medusa.scene_exceptions import get_scene_exceptions from medusa.show.show import Show -from medusa.show_name_helpers import allPossibleShowNames from requests.utils import add_dict_to_cookiejar @@ -500,7 +499,7 @@ def _get_episode_search_strings(self, episode, add_string=''): 'Episode': [] } - for show_name in allPossibleShowNames(episode.show, season=episode.scene_season): + for show_name in episode.show.get_all_possible_names(season=episode.scene_season): episode_string = show_name + self.search_separator episode_string_fallback = None @@ -548,7 +547,7 @@ def _get_season_search_strings(self, episode): 'Season': [] } - for show_name in allPossibleShowNames(episode.show, season=episode.season): + for show_name in episode.show.get_all_possible_names(season=episode.season): episode_string = show_name + ' ' if episode.show.air_by_date or episode.show.sports: @@ -680,10 +679,8 @@ def add_cookies_from_ui(self): return {'result': True, 'message': ''} - else: # Else is not needed, but placed it here for readability - ui.notifications.message('Failed to validate cookie for provider {provider}'.format(provider=self.name), - 'No Cookies added from ui for provider: 
{0}'.format(self.name)) - return {'result': False, + else: # Cookies not set. Don't need to check cookies + return {'result': True, 'message': 'No Cookies added from ui for provider: {0}'.format(self.name)} return {'result': False, diff --git a/medusa/providers/nzb/newznab.py b/medusa/providers/nzb/newznab.py index 4c6563c4aa..267f59daa3 100644 --- a/medusa/providers/nzb/newznab.py +++ b/medusa/providers/nzb/newznab.py @@ -89,7 +89,7 @@ def __init__(self, name, url, key='0', cat_ids='5030,5040', search_mode='eponly' # self.cap_movie_search = None # self.cap_audio_search = None - self.cache = tv.Cache(self, min_time=30) # only poll newznab providers every 30 minutes max + self.cache = tv.Cache(self) def search(self, search_strings, age=0, ep_obj=None): """ @@ -174,7 +174,8 @@ def search(self, search_strings, age=0, ep_obj=None): self.torznab = False if not html('item'): - logger.log('No results returned from provider', logger.INFO) + logger.log('No results returned from provider. Check chosen Newznab search categories ' + 'in provider settings and/or usenet retention', logger.DEBUG) continue for item in html('item'): diff --git a/medusa/providers/torrent/__init__.py b/medusa/providers/torrent/__init__.py index ff7e02a08a..9d3c17fa3d 100644 --- a/medusa/providers/torrent/__init__.py +++ b/medusa/providers/torrent/__init__.py @@ -54,7 +54,6 @@ ) from .xml import ( - bitsnoop, torrentz2, ) @@ -64,5 +63,5 @@ 'newpct', 'pretome', 'sdbits', 'scc', 'scenetime', 'speedcd', 'thepiratebay', 'tntvillage', 'tokyotoshokan', 'torrentbytes', 'torrentleech', 'transmitthenet', 'tvchaosuk', 'xthor', 'zooqle', 'bitcannon', 'btn', 'hd4free', 'hdbits', 'norbits', 'rarbg', 't411', 'torrentday', 'torrentproject', 'nyaatorrents', 'rsstorrent', 'shazbat', - 'bitsnoop', 'torrentz2', 'animetorrents' + 'torrentz2', 'animetorrents' ] diff --git a/medusa/providers/torrent/html/animebytes.py b/medusa/providers/torrent/html/animebytes.py index 8324ca6b43..aa9dfba402 100644 --- a/medusa/providers/torrent/html/animebytes.py +++ b/medusa/providers/torrent/html/animebytes.py @@ -28,7 +28,6 @@ from medusa.bs4_parser import BS4Parser from medusa.helper.common import convert_size from medusa.providers.torrent.torrent_provider import TorrentProvider -from medusa.show_name_helpers import allPossibleShowNames from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -338,7 +337,7 @@ def _get_episode_search_strings(self, episode, add_string=''): 'Episode': [] } - for show_name in allPossibleShowNames(episode.show, season=episode.scene_season): + for show_name in episode.show.get_all_possible_names(season=episode.scene_season): search_string['Episode'].append(show_name.strip()) return [search_string] @@ -349,7 +348,7 @@ def _get_season_search_strings(self, episode): 'Season': [] } - for show_name in allPossibleShowNames(episode.show, season=episode.scene_season): + for show_name in episode.show.get_all_possible_names(season=episode.scene_season): search_string['Season'].append(show_name.strip()) return [search_string] diff --git a/medusa/providers/torrent/html/animetorrents.py b/medusa/providers/torrent/html/animetorrents.py index 21dbe8a5ac..35042b3368 100644 --- a/medusa/providers/torrent/html/animetorrents.py +++ b/medusa/providers/torrent/html/animetorrents.py @@ -31,7 +31,6 @@ from medusa.helper.common import convert_size from medusa.helper.exceptions import AuthException from medusa.providers.torrent.torrent_provider import TorrentProvider -from medusa.show_name_helpers import allPossibleShowNames 
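Several providers in this diff switch from the allPossibleShowNames helper to a method on the show object; the shared pattern looks roughly like the sketch below. get_all_possible_names is the new method introduced by this change, while the SxxEyy formatting is only illustrative (the real code also branches on air-by-date, sports, and anime shows).

def build_episode_search_strings(episode, separator=' ', add_string=''):
    """Sketch: build one search string per known name of the episode's show."""
    strings = []
    for show_name in episode.show.get_all_possible_names(season=episode.scene_season):
        episode_string = '{name}{sep}S{season:02d}E{episode:02d}'.format(
            name=show_name, sep=separator,
            season=episode.scene_season, episode=episode.scene_episode)
        if add_string:
            episode_string += separator + add_string
        strings.append(episode_string.strip())
    return {'Episode': strings}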
from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -256,7 +255,7 @@ def _get_episode_search_strings(self, episode, add_string=''): episode.scene_season ) - for show_name in allPossibleShowNames(episode.show, episode.scene_season): + for show_name in episode.show.get_all_possible_names(season=episode.scene_season): if show_name in season_scene_names: episode_season = int(episode.scene_episode) else: @@ -278,7 +277,7 @@ def _get_season_search_strings(self, episode): 'Season': [] } - for show_name in allPossibleShowNames(episode.show, season=episode.season): + for show_name in episode.show.get_all_possible_names(season=episode.season): search_string['Season'].append(show_name) return [search_string] diff --git a/medusa/providers/torrent/html/bithdtv.py b/medusa/providers/torrent/html/bithdtv.py index c70d320432..54987dd647 100644 --- a/medusa/providers/torrent/html/bithdtv.py +++ b/medusa/providers/torrent/html/bithdtv.py @@ -139,8 +139,8 @@ def parse(self, data, mode): continue try: - title = cells[2].find('a')['title'] - download_url = urljoin(self.url, cells[0].find('a')['href']) + title = cells[2].find('a')['title'] if cells[2] else None + download_url = urljoin(self.url, cells[0].find('a')['href']) if cells[0] else None if not all([title, download_url]): continue diff --git a/medusa/providers/torrent/html/limetorrents.py b/medusa/providers/torrent/html/limetorrents.py index 8dea80eba9..0e3bc83459 100644 --- a/medusa/providers/torrent/html/limetorrents.py +++ b/medusa/providers/torrent/html/limetorrents.py @@ -56,7 +56,9 @@ def __init__(self): self.urls = { 'update': urljoin(self.url, '/post/updatestats.php'), 'search': urljoin(self.url, '/search/tv/{query}/'), - 'rss': urljoin(self.url, '/browse-torrents/TV-shows/date/{page}/'), + # Original rss feed url, temporary offline. Replaced by the main Tv-show page. + # 'rss': urljoin(self.url, '/browse-torrents/TV-shows/date/{page}/'), + 'rss': urljoin(self.url, '/browse-torrents/TV-shows/'), } # Proper Strings @@ -96,7 +98,8 @@ def search(self, search_strings, age=0, ep_obj=None): search_url = self.urls['search'].format(query=search_string) else: - search_url = self.urls['rss'].format(page=1) + # search_url = self.urls['rss'].format(page=1) + search_url = self.urls['rss'] response = self.get_url(search_url, returns='response') if not response or not response.text: diff --git a/medusa/providers/torrent/html/speedcd.py b/medusa/providers/torrent/html/speedcd.py index c7ac0cb511..ae5c24dd43 100644 --- a/medusa/providers/torrent/html/speedcd.py +++ b/medusa/providers/torrent/html/speedcd.py @@ -50,7 +50,10 @@ def __init__(self): # URLs self.url = 'https://speed.cd' self.urls = { - 'login': urljoin(self.url, 'takeElogin.php'), + 'login': [urljoin(self.url, 'take_login.php'), + urljoin(self.url, 'takeElogin.php'), + urljoin(self.url, 'takelogin.php') + ], 'search': urljoin(self.url, 'browse.php'), } @@ -192,16 +195,19 @@ def login(self): 'password': self.password, } - response = self.get_url(self.urls['login'], post_data=login_params, returns='response') - if not response or not response.text: - logger.log('Unable to connect to provider', logger.WARNING) - return False + for login_url in self.urls['login']: + response = self.get_url(login_url, post_data=login_params, returns='response') + if not response or not response.text: + logger.log('Unable to connect to provider using login URL: {url}'.format(url=login_url), logger.DEBUG) + continue - if re.search('Incorrect username or Password. 
Please try again.', response.text): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False + if re.search('Incorrect username or Password. Please try again.', response.text): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + return True - return True + logger.log('Unable to connect to provider', logger.WARNING) + return provider = SpeedCDProvider() diff --git a/medusa/providers/torrent/json/rarbg.py b/medusa/providers/torrent/json/rarbg.py index b351000754..7a12739a8c 100644 --- a/medusa/providers/torrent/json/rarbg.py +++ b/medusa/providers/torrent/json/rarbg.py @@ -52,7 +52,7 @@ def __init__(self): } # Proper Strings - self.proper_strings = ['{{PROPER|REPACK}}'] + self.proper_strings = ['{{PROPER|REPACK|REAL|RERIP}}'] # Miscellaneous Options self.ranked = None @@ -174,7 +174,7 @@ def parse(self, data, mode): for row in torrent_rows: try: title = row.pop('title') - download_url = row.pop('download') + download_url = row.pop('download') + self._custom_trackers if not all([title, download_url]): continue diff --git a/medusa/providers/torrent/json/t411.py b/medusa/providers/torrent/json/t411.py index 995ca04eed..d7e66b498d 100644 --- a/medusa/providers/torrent/json/t411.py +++ b/medusa/providers/torrent/json/t411.py @@ -51,7 +51,7 @@ def __init__(self): self.tokenLastUpdate = None # URLs - self.url = 'https://api.t411.li' + self.url = 'https://api.t411.ai' self.urls = { 'search': urljoin(self.url, 'torrents/search/{search}'), 'rss': urljoin(self.url, 'torrents/top/today'), diff --git a/medusa/providers/torrent/json/torrentday.py b/medusa/providers/torrent/json/torrentday.py index 9abdac7d50..9af5a0498f 100644 --- a/medusa/providers/torrent/json/torrentday.py +++ b/medusa/providers/torrent/json/torrentday.py @@ -60,7 +60,18 @@ def __init__(self): self.enable_cookies = True self.cookies = '' - self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1}, + # TV/480p - 24 + # TV/Bluray - 32 + # TV/DVD-R - 31 + # TV/DVD-Rip - 33 + # TV/Mobile - 46 + # TV/Packs - 14 + # TV/SD/x264 - 26 + # TV/x264 - 7 + # TV/x265 - 34 + # TV/XviD - 2 + + self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c7': 1, 'c24': 1, 'c26': 1, 'c31': 1, 'c32': 1, 'c33': 1, 'c34': 1, 'c46': 1}, 'RSS': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}} # Torrent Stats diff --git a/medusa/providers/torrent/rss/extratorrent.py b/medusa/providers/torrent/rss/extratorrent.py index a216bb16b3..a85f5674c5 100644 --- a/medusa/providers/torrent/rss/extratorrent.py +++ b/medusa/providers/torrent/rss/extratorrent.py @@ -47,7 +47,7 @@ def __init__(self): self.public = True # URLs - self.url = 'http://extratorrent.cc' + self.url = 'http://extra.to' self.custom_url = None # Proper Strings diff --git a/medusa/providers/torrent/rss/rsstorrent.py b/medusa/providers/torrent/rss/rsstorrent.py index 6443d03458..14e88f8bbe 100644 --- a/medusa/providers/torrent/rss/rsstorrent.py +++ b/medusa/providers/torrent/rss/rsstorrent.py @@ -171,7 +171,7 @@ def validate_rss(self): if not add_cookie.get('result'): return add_cookie - data = self.cache._getRSSData()['entries'] + data = self.cache._get_rss_data()['entries'] if not data: return {'result': False, 'message': 'No items found in the RSS feed {0}'.format(self.url)} @@ -226,5 +226,4 @@ class TorrentRssCache(tv.Cache): def _get_rss_data(self): """Get RSS data.""" - self.provider.add_cookies_from_ui() return self.get_rss_feed(self.provider.url) diff --git 
a/medusa/providers/torrent/xml/bitsnoop.py b/medusa/providers/torrent/xml/bitsnoop.py deleted file mode 100644 index 6fa77c9675..0000000000 --- a/medusa/providers/torrent/xml/bitsnoop.py +++ /dev/null @@ -1,161 +0,0 @@ -# coding=utf-8 -# Author: Gonçalo M. (aka duramato/supergonkas) -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . -"""Provider code for Bitsnoop.""" -from __future__ import unicode_literals - -import traceback - -from medusa import ( - logger, - tv, -) -from medusa.bs4_parser import BS4Parser -from medusa.helper.common import convert_size, try_int -from medusa.providers.torrent.torrent_provider import TorrentProvider - -from requests.compat import urljoin - - -class BitSnoopProvider(TorrentProvider): - """BitSnoop Torrent provider.""" - - def __init__(self): - """Initialize the class.""" - super(self.__class__, self).__init__('BitSnoop') - - # Credentials - self.public = True - - # URLs - self.url = 'https://bitsnoop.com' - self.urls = { - 'base': self.url, - 'rss': urljoin(self.url, '/new_video.html?fmt=rss'), - 'search': urljoin(self.url, '/search/video/'), - } - - # Proper Strings - self.proper_strings = ['PROPER', 'REPACK', 'REAL'] - - # Miscellaneous Options - - # Torrent Stats - self.minseed = None - self.minleech = None - - # Cache - self.cache = tv.Cache(self, min_time=20) - - def search(self, search_strings, age=0, ep_obj=None): - """ - Search a provider and parse the results. - - :param search_strings: A dict with mode (key) and the search value (value) - :param age: Not used - :param ep_obj: Not used - :returns: A list of search results (structure) - """ - results = [] - - for mode in search_strings: - logger.log('Search mode: {0}'.format(mode), logger.DEBUG) - - for search_string in search_strings[mode]: - if mode != 'RSS': - logger.log('Search string: {search}'.format - (search=search_string), logger.DEBUG) - - search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] - response = self.get_url(search_url, returns='response') - if not response or not response.text: - logger.log('No data returned from provider', logger.DEBUG) - continue - elif not response.text.startswith(' - -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - -from __future__ import unicode_literals - -import datetime - -from requests.compat import urljoin -from . 
import app, helpers, logger - -session = helpers.make_session() - - -def sendNZB(nzb): # pylint:disable=too-many-return-statements, too-many-branches, too-many-statements - """ - Sends an NZB to SABnzbd via the API. - - :param nzb: The NZBSearchResult object to send to SAB - """ - - category = app.SAB_CATEGORY - if nzb.show.is_anime: - category = app.SAB_CATEGORY_ANIME - - # if it aired more than 7 days ago, override with the backlog category IDs - for cur_ep in nzb.episodes: - if datetime.date.today() - cur_ep.airdate > datetime.timedelta(days=7): - category = app.SAB_CATEGORY_ANIME_BACKLOG if nzb.show.is_anime else app.SAB_CATEGORY_BACKLOG - - # set up a dict with the URL params in it - params = {'output': 'json'} - if app.SAB_USERNAME: - params['ma_username'] = app.SAB_USERNAME - if app.SAB_PASSWORD: - params['ma_password'] = app.SAB_PASSWORD - if app.SAB_APIKEY: - params['apikey'] = app.SAB_APIKEY - - if category: - params['cat'] = category - - if nzb.priority: - params['priority'] = 2 if app.SAB_FORCED else 1 - - logger.log('Sending NZB to SABnzbd') - url = urljoin(app.SAB_HOST, 'api') - - if nzb.resultType == 'nzb': - params['mode'] = 'addurl' - params['name'] = nzb.url - jdata = helpers.get_url(url, params=params, session=session, returns='json', verify=False) - elif nzb.resultType == 'nzbdata': - params['mode'] = 'addfile' - multiPartParams = {'nzbfile': (nzb.name + '.nzb', nzb.extraInfo[0])} - jdata = helpers.get_url(url, params=params, file=multiPartParams, session=session, returns='json', verify=False) - - if not jdata: - logger.log('Error connecting to sab, no data returned') - return False - - logger.log('Result text from SAB: {0}'.format(jdata), logger.DEBUG) - - result, _ = _checkSabResponse(jdata) - return result - - -def _checkSabResponse(jdata): - """ - Check response from SAB - - :param jdata: Response from requests api call - :return: a list of (Boolean, string) which is True if SAB is not reporting an error - """ - if 'error' in jdata: - if jdata['error'] == 'API Key Incorrect': - logger.log("Sabnzbd's API key is incorrect", logger.WARNING) - else: - logger.log('Sabnzbd encountered an error: {0}'.format(jdata['error']), logger.ERROR) - return False, jdata['error'] - else: - return True, jdata - - -def getSabAccesMethod(host=None): - """ - Find out how we should connect to SAB - - :param host: hostname where SAB lives - :return: (boolean, string) with True if method was successful - """ - params = {'mode': 'auth', 'output': 'json'} - url = urljoin(host, 'api') - data = helpers.get_url(url, params=params, session=session, returns='json', verify=False) - if not data: - return False, data - - return _checkSabResponse(data) - - -def testAuthentication(host=None, username=None, password=None, apikey=None): - """ - Sends a simple API request to SAB to determine if the given connection information is connect - - :param host: The host where SAB is running (incl port) - :param username: The username to use for the HTTP request - :param password: The password to use for the HTTP request - :param apikey: The API key to provide to SAB - :return: A tuple containing the success boolean and a message - """ - - # build up the URL parameters - params = { - 'mode': 'queue', - 'output': 'json', - 'ma_username': username, - 'ma_password': password, - 'apikey': apikey - } - - url = urljoin(host, 'api') - - data = helpers.get_url(url, params=params, session=session, returns='json', verify=False) - if not data: - return False, data - - # check the result and determine if it's good or not - 
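The removed sab module above wraps SABnzbd's HTTP API; a condensed sketch of the addurl call it performed is shown below. The parameter names (mode, name, apikey, cat, priority, output) come from the removed code, while the function name and host handling are placeholders.

import requests

def send_nzb_url(sab_host, apikey, nzb_url, category=None, priority=0):
    """Queue an NZB by URL through SABnzbd's api endpoint and return the JSON reply."""
    params = {
        'output': 'json',
        'mode': 'addurl',
        'name': nzb_url,
        'apikey': apikey,
    }
    if category:
        params['cat'] = category
    if priority:
        params['priority'] = priority
    response = requests.get(sab_host.rstrip('/') + '/api', params=params, verify=False, timeout=30)
    return response.json()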
result, sabText = _checkSabResponse(data) - if not result: - return False, sabText - - return True, 'Success' diff --git a/medusa/scene_exceptions.py b/medusa/scene_exceptions.py index 23bb733f95..1cc4f7ae69 100644 --- a/medusa/scene_exceptions.py +++ b/medusa/scene_exceptions.py @@ -68,6 +68,12 @@ def refresh_exceptions_cache(): logger.info('Finished processing {x} scene exceptions.', x=len(exceptions)) +def get_last_refresh(ex_list): + """Get the last update timestamp for the specific scene exception list.""" + cache_db_con = db.DBConnection('cache.db') + return cache_db_con.select(b'SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?', [ex_list]) + + def should_refresh(ex_list): """ Check if we should refresh cache for items in ex_list. @@ -76,14 +82,8 @@ def should_refresh(ex_list): :return: True if refresh is needed """ max_refresh_age_secs = 86400 # 1 day + rows = get_last_refresh(ex_list) - cache_db_con = db.DBConnection('cache.db') - rows = cache_db_con.select( - b'SELECT last_refreshed ' - b'FROM scene_exceptions_refresh ' - b'WHERE list = ?', - [ex_list] - ) if rows: last_refresh = int(rows[0][b'last_refreshed']) return int(time.time()) > last_refresh + max_refresh_age_secs @@ -223,21 +223,28 @@ def update_scene_exceptions(indexer_id, indexer, scene_exceptions, season=-1): ) -def retrieve_exceptions(): +def retrieve_exceptions(force=False, exception_type=None): """ Look up the exceptions from all sources. Parses the exceptions into a dict, and inserts them into the scene_exceptions table in cache.db. Also clears the scene name cache. + :param force: If enabled this will force the refresh of scene exceptions using the medusa exceptions, + xem exceptions and anidb exceptions. + :param exception_type: Only refresh a specific exception_type. 
Options are: 'medusa', 'anidb', 'xem' """ + custom_exceptions = _get_custom_exceptions(force) if exception_type in ['custom_exceptions', None] else defaultdict(dict) + xem_exceptions = _get_xem_exceptions(force) if exception_type in ['xem', None] else defaultdict(dict) + anidb_exceptions = _get_anidb_exceptions(force) if exception_type in ['anidb', None] else defaultdict(dict) + # Combined scene exceptions from all sources combined_exceptions = combine_exceptions( # Custom scene exceptions - _get_custom_exceptions(), + custom_exceptions, # XEM scene exceptions - _get_xem_exceptions(), + xem_exceptions, # AniDB scene exceptions - _get_anidb_exceptions(), + anidb_exceptions, ) queries = [] @@ -281,10 +288,10 @@ def combine_exceptions(*scene_exceptions): return combined_ex -def _get_custom_exceptions(): +def _get_custom_exceptions(force): custom_exceptions = defaultdict(dict) - if should_refresh('custom_exceptions'): + if force or should_refresh('custom_exceptions'): for indexer in indexerApi().indexers: try: location = indexerApi(indexer).config['scene_loc'] @@ -331,11 +338,11 @@ def _get_custom_exceptions(): return custom_exceptions -def _get_xem_exceptions(): +def _get_xem_exceptions(force): xem_exceptions = defaultdict(dict) xem_url = 'http://thexem.de/map/allNames?origin={0}&seasonNumbers=1' - if should_refresh('xem'): + if force or should_refresh('xem'): for indexer in indexerApi().indexers: indexer_api = indexerApi(indexer) @@ -391,12 +398,12 @@ def _get_xem_exceptions(): return xem_exceptions -def _get_anidb_exceptions(): +def _get_anidb_exceptions(force): anidb_exceptions = defaultdict(dict) # AniDB exceptions use TVDB as indexer exceptions = anidb_exceptions[INDEXER_TVDBV2] - if should_refresh('anidb'): + if force or should_refresh('anidb'): logger.info('Checking for scene exceptions updates from AniDB') for show in app.showList: diff --git a/medusa/search/__init__.py b/medusa/search/__init__.py index e69de29bb2..e00b2c7135 100644 --- a/medusa/search/__init__.py +++ b/medusa/search/__init__.py @@ -0,0 +1,3 @@ +# coding=utf-8 + +"""Search module for all Medusa searches.""" diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index f15c23f016..88f136ac34 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -1,39 +1,32 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . +"""Backlog module.""" import datetime +import logging import threading -from six import iteritems -from .queue import BacklogQueueItem +from medusa import app, common, db, scheduler, ui +from medusa.helper.common import episode_num +from medusa.logger.adapters.style import BraceAdapter +from medusa.search.queue import BacklogQueueItem -from .. 
import app, common, db, logger, scheduler, ui +from six import iteritems -from ..helper.common import episode_num +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class BacklogSearchScheduler(scheduler.Scheduler): - def forceSearch(self): + """Backlog search scheduler class.""" + + def force_search(self): + """Set the last backlog in the DB.""" self.action._set_last_backlog(1) self.lastRun = datetime.datetime.fromordinal(1) def next_run(self): + """Return when backlog should run next.""" if self.action._last_backlog <= 1: return datetime.date.today() else: @@ -41,8 +34,10 @@ def next_run(self): class BacklogSearcher(object): - def __init__(self): + """Backlog Searcher class.""" + def __init__(self): + """Initialize the class.""" self._last_backlog = self._get_last_backlog() self.cycleTime = app.BACKLOG_FREQUENCY / 60.0 / 24 self.lock = threading.Lock() @@ -52,30 +47,33 @@ def __init__(self): self.forced = False self.currentSearchInfo = {} - self._resetPI() + self._reset_pi() - def _resetPI(self): + def _reset_pi(self): + """Reset percent done.""" self.percentDone = 0 self.currentSearchInfo = {'title': 'Initializing'} def get_progress_indicator(self): + """Get backlog search progress indicator.""" if self.amActive: return ui.ProgressIndicator(self.percentDone, self.currentSearchInfo) else: return None def am_running(self): - logger.log(u"amWaiting: " + str(self.amWaiting) + ", amActive: " + str(self.amActive), logger.DEBUG) + """Check if backlog is running.""" + log.debug(u'amWaiting: {0}, amActive: {1}', self.amWaiting, self.amActive) return (not self.amWaiting) and self.amActive def search_backlog(self, which_shows=None): - + """Run the backlog search for given shows.""" if self.amActive: - logger.log(u"Backlog is still running, not starting it again", logger.DEBUG) + log.debug(u'Backlog is still running, not starting it again') return if app.forced_search_queue_scheduler.action.is_forced_search_in_progress(): - logger.log(u"Manual search is running. Can't start Backlog Search", logger.WARNING) + log.warning(u'Manual search is running. 
Unable to start Backlog Search') return self.amActive = True @@ -88,14 +86,15 @@ def search_backlog(self, which_shows=None): self._get_last_backlog() - curDate = datetime.date.today().toordinal() + cur_date = datetime.date.today().toordinal() from_date = datetime.date.fromordinal(1) if not which_shows and self.forced: - logger.log(u'Running limited backlog search on missed episodes from last {0} days'.format(app.BACKLOG_DAYS)) + log.info(u'Running limited backlog search on missed episodes from last {0} days', + app.BACKLOG_DAYS) from_date = datetime.date.today() - datetime.timedelta(days=app.BACKLOG_DAYS) else: - logger.log(u'Running full backlog search on missed episodes for selected shows') + log.info(u'Running full backlog search on missed episodes for selected shows') # go through non air-by-date shows and see if they need any episodes for cur_show in show_list: @@ -106,54 +105,58 @@ def search_backlog(self, which_shows=None): segments = self._get_segments(cur_show, from_date) for season, segment in iteritems(segments): - self.currentSearchInfo = {'title': cur_show.name + " Season " + str(season)} + self.currentSearchInfo = {'title': cur_show.name + ' Season ' + str(season)} backlog_queue_item = BacklogQueueItem(cur_show, segment) app.search_queue_scheduler.action.add_item(backlog_queue_item) # @UndefinedVariable if not segments: - logger.log(u"Nothing needs to be downloaded for %s, skipping" % cur_show.name, logger.DEBUG) + log.debug(u'Nothing needs to be downloaded for {0!r}, skipping', cur_show.name) # don't consider this an actual backlog search if we only did recent eps # or if we only did certain shows if from_date == datetime.date.fromordinal(1) and not which_shows: - self._set_last_backlog(curDate) + self._set_last_backlog(cur_date) self.amActive = False - self._resetPI() + self._reset_pi() def _get_last_backlog(self): - - logger.log(u"Retrieving the last check time from the DB", logger.DEBUG) + """Get the last time backloged runned.""" + log.debug(u'Retrieving the last check time from the DB') main_db_con = db.DBConnection() - sql_results = main_db_con.select("SELECT last_backlog FROM info") + sql_results = main_db_con.select('SELECT last_backlog ' + 'FROM info') if not sql_results: last_backlog = 1 - elif sql_results[0]["last_backlog"] is None or sql_results[0]["last_backlog"] == "": + elif sql_results[0]['last_backlog'] is None or sql_results[0]['last_backlog'] == '': last_backlog = 1 else: - last_backlog = int(sql_results[0]["last_backlog"]) + last_backlog = int(sql_results[0]['last_backlog']) if last_backlog > datetime.date.today().toordinal(): last_backlog = 1 self._last_backlog = last_backlog return self._last_backlog - def _get_segments(self, show, from_date): + @staticmethod + def _get_segments(show, from_date): + """Get episodes that should be backlog searched.""" wanted = {} if show.paused: - logger.log(u"Skipping backlog for %s because the show is paused" % show.name, logger.DEBUG) + log.debug(u'Skipping backlog for {0} because the show is paused', show.name) return wanted - allowed_qualities, preferred_qualities = common.Quality.split_quality(show.quality) - - logger.log(u"Seeing if we need anything from %s" % show.name, logger.DEBUG) + log.debug(u'Seeing if we need anything from {0}', show.name) con = db.DBConnection() sql_results = con.select( - "SELECT status, season, episode, manually_searched FROM tv_episodes WHERE airdate > ? AND showid = ?", + 'SELECT status, season, episode, manually_searched ' + 'FROM tv_episodes ' + 'WHERE airdate > ?' 
+ ' AND showid = ?', [from_date.toordinal(), show.indexerid] ) @@ -163,10 +166,14 @@ def _get_segments(self, show, from_date): sql_result['manually_searched']) if not should_search: continue - logger.log(u"Found needed backlog episodes for: {show} {ep}. Reason: {reason}".format - (show=show.name, ep=episode_num(sql_result["season"], sql_result["episode"]), - reason=shold_search_reason), logger.DEBUG) - ep_obj = show.get_episode(sql_result["season"], sql_result["episode"]) + log.debug( + u'Found needed backlog episodes for: {show} {ep}. Reason: {reason}', { + 'show': show.name, + 'ep': episode_num(sql_result['season'], sql_result['episode']), + 'reason': shold_search_reason, + } + ) + ep_obj = show.get_episode(sql_result['season'], sql_result['episode']) if ep_obj.season not in wanted: wanted[ep_obj.season] = [ep_obj] @@ -175,19 +182,24 @@ def _get_segments(self, show, from_date): return wanted - def _set_last_backlog(self, when): - - logger.log(u"Setting the last backlog in the DB to " + str(when), logger.DEBUG) + @staticmethod + def _set_last_backlog(when): + """Set the last backlog in the DB.""" + log.debug(u'Setting the last backlog in the DB to {0}', when) main_db_con = db.DBConnection() - sql_results = main_db_con.select("SELECT last_backlog FROM info") + sql_results = main_db_con.select('SELECT last_backlog ' + 'FROM info') if not sql_results: - main_db_con.action("INSERT INTO info (last_backlog, last_indexer) VALUES (?,?)", [str(when), 0]) + main_db_con.action('INSERT INTO info (last_backlog, last_indexer) ' + 'VALUES (?,?)', [str(when), 0]) else: - main_db_con.action("UPDATE info SET last_backlog={0}".format(when)) + main_db_con.action('UPDATE info ' + 'SET last_backlog={0}'.format(when)) def run(self, force=False): + """Run the backlog.""" try: if force: self.forced = True diff --git a/medusa/search/core.py b/medusa/search/core.py index ad399a51e7..529d8d74f0 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -1,41 +1,58 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + """Search core module.""" + import datetime import errno +import logging import os import threading import traceback - from socket import timeout as socket_timeout +from medusa import ( + app, + common, + db, + failed_history, + helpers, + history, + name_cache, + notifiers, + nzb_splitter, + show_name_helpers, + ui, +) +from medusa.clients import torrent +from medusa.clients.nzb import ( + nzbget, + sab, +) +from medusa.common import ( + MULTI_EP_RESULT, + Quality, + SEASON_RESULT, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, + UNKNOWN, +) +from medusa.helper.common import ( + enabled_providers, + episode_num, +) +from medusa.helper.exceptions import ( + AuthException, + ex, +) +from medusa.logger.adapters.style import BraceAdapter +from medusa.providers import sorted_provider_list +from medusa.providers.generic_provider import GenericProvider + import requests -from .. 
import ( - app, clients, common, db, failed_history, helpers, history, logger, - name_cache, notifiers, nzb_splitter, nzbget, sab, show_name_helpers, ui -) -from ..common import MULTI_EP_RESULT, Quality, SEASON_RESULT, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, UNKNOWN -from ..helper.common import enabled_providers, episode_num -from ..helper.exceptions import AuthException, ex -from ..providers import sorted_provider_list -from ..providers.generic_provider import GenericProvider +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) def _download_result(result): @@ -47,36 +64,36 @@ def _download_result(result): """ res_provider = result.provider if res_provider is None: - logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR) + log.error(u'Invalid provider name - this is a coding error, report it please') return False # nzbs with an URL can just be downloaded from the provider - if result.resultType == "nzb": + if result.resultType == u'nzb': new_result = res_provider.download_result(result) # if it's an nzb data result - elif result.resultType == "nzbdata": + elif result.resultType == u'nzbdata': # get the final file path to the nzb - file_name = os.path.join(app.NZB_DIR, result.name + ".nzb") + file_name = os.path.join(app.NZB_DIR, result.name + u'.nzb') - logger.log(u"Saving NZB to " + file_name) + log.info(u'Saving NZB to {0}', file_name) new_result = True # save the data to disk try: - with open(file_name, 'w') as fileOut: + with open(file_name, u'w') as fileOut: fileOut.write(result.extraInfo[0]) helpers.chmod_as_parent(file_name) except EnvironmentError as e: - logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR) + log.error(u'Error trying to save NZB to black hole: {0}', ex(e)) new_result = False - elif result.resultType == "torrent": + elif result.resultType == u'torrent': new_result = res_provider.download_result(result) else: - logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR) + log.error(u'Invalid provider type - this is a coding error, report it please') new_result = False return new_result @@ -84,7 +101,7 @@ def _download_result(result): def snatch_episode(result): """ - Internal logic necessary to actually "snatch" a result that has been found. + Internal logic necessary to actually snatch a result that has been found. :param result: SearchResult instance to be snatched. :return: boolean, True on success @@ -100,45 +117,45 @@ def snatch_episode(result): if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7): result.priority = 1 if result.proper_tags: - logger.log(u'Found proper tags for {0}. Snatching as PROPER'.format(result.name), logger.DEBUG) + log.debug(u'Found proper tags for {0}. 
Snatching as PROPER', result.name) is_proper = True end_status = SNATCHED_PROPER else: end_status = SNATCHED - if result.url.startswith('magnet') or result.url.endswith('torrent'): - result.resultType = 'torrent' + if result.url.startswith(u'magnet') or result.url.endswith(u'torrent'): + result.resultType = u'torrent' # NZBs can be sent straight to SAB or saved to disk - if result.resultType in ("nzb", "nzbdata"): - if app.NZB_METHOD == "blackhole": + if result.resultType in (u'nzb', u'nzbdata'): + if app.NZB_METHOD == u'blackhole': result_downloaded = _download_result(result) - elif app.NZB_METHOD == "sabnzbd": - result_downloaded = sab.sendNZB(result) - elif app.NZB_METHOD == "nzbget": + elif app.NZB_METHOD == u'sabnzbd': + result_downloaded = sab.send_nzb(result) + elif app.NZB_METHOD == u'nzbget': result_downloaded = nzbget.sendNZB(result, is_proper) else: - logger.log(u"Unknown NZB action specified in config: " + app.NZB_METHOD, logger.ERROR) + log.error(u'Unknown NZB action specified in config: {0}', app.NZB_METHOD) result_downloaded = False # Torrents can be sent to clients or saved to disk - elif result.resultType == "torrent": + elif result.resultType == u'torrent': # torrents are saved to disk when blackhole mode - if app.TORRENT_METHOD == "blackhole": + if app.TORRENT_METHOD == u'blackhole': result_downloaded = _download_result(result) else: - if not result.content and not result.url.startswith('magnet'): + if not result.content and not result.url.startswith(u'magnet'): if result.provider.login(): - result.content = result.provider.get_url(result.url, returns='content') + result.content = result.provider.get_url(result.url, returns=u'content') - if result.content or result.url.startswith('magnet'): - client = clients.get_client_class(app.TORRENT_METHOD)() + if result.content or result.url.startswith(u'magnet'): + client = torrent.get_client_class(app.TORRENT_METHOD)() result_downloaded = client.send_torrent(result) else: - logger.log(u"Torrent file content is empty", logger.WARNING) + log.warning(u'Torrent file content is empty') result_downloaded = False else: - logger.log(u"Unknown result type, unable to download it (%r)" % result.resultType, logger.ERROR) + log.error(u'Unknown result type, unable to download it: {0!r}', result.resultType) result_downloaded = False if not result_downloaded: @@ -147,7 +164,7 @@ def snatch_episode(result): if app.USE_FAILED_DOWNLOADS: failed_history.log_snatch(result) - ui.notifications.message('Episode snatched', result.name) + ui.notifications.message(u'Episode snatched', result.name) history.log_snatch(result) @@ -160,7 +177,7 @@ def snatch_episode(result): curEpObj.status = Quality.composite_status(SNATCHED_BEST, result.quality) else: curEpObj.status = Quality.composite_status(end_status, result.quality) - # Reset all others fields to the "snatched" status + # Reset all others fields to the snatched status # New snatch by default doesn't have nfo/tbn curEpObj.hasnfo = False curEpObj.hastbn = False @@ -175,7 +192,7 @@ def snatch_episode(result): # Need to reset subtitle settings because it's a different file curEpObj.subtitles = list() curEpObj.subtitles_searchcount = 0 - curEpObj.subtitles_lastsearch = '0001-01-01 00:00:00' + curEpObj.subtitles_lastsearch = u'0001-01-01 00:00:00' # Need to store the correct is_proper. 
Not use the old one curEpObj.is_proper = True if result.proper_tags else False @@ -189,30 +206,27 @@ def snatch_episode(result): sql_l.append(curEpObj.get_sql()) if curEpObj.status not in Quality.DOWNLOADED: - # TODO: Remove this broad catch when all notifiers handle exceptions - try: - notify_message = curEpObj.formatted_filename('%SN - %Sx%0E - %EN - %QN') - if all([app.SEEDERS_LEECHERS_IN_NOTIFY, result.seeders not in (-1, None), - result.leechers not in (-1, None)]): - notifiers.notify_snatch("{0} with {1} seeders and {2} leechers from {3}".format - (notify_message, result.seeders, - result.leechers, result.provider.name), is_proper) - else: - notifiers.notify_snatch("{0} from {1}".format(notify_message, result.provider.name), is_proper) - except Exception as e: - # Without this, when notification fail, it crashes the snatch thread and Medusa will - # keep snatching until notification is sent - logger.log(u"Failed to send snatch notification. Error: {0}".format(e), logger.DEBUG) + notify_message = curEpObj.formatted_filename(u'%SN - %Sx%0E - %EN - %QN') + if all([app.SEEDERS_LEECHERS_IN_NOTIFY, result.seeders not in (-1, None), + result.leechers not in (-1, None)]): + notifiers.notify_snatch(u'{0} with {1} seeders and {2} leechers from {3}'.format + (notify_message, result.seeders, + result.leechers, result.provider.name), is_proper) + else: + notifiers.notify_snatch(u'{0} from {1}'.format(notify_message, result.provider.name), is_proper) if app.USE_TRAKT and app.TRAKT_SYNC_WATCHLIST: trakt_data.append((curEpObj.season, curEpObj.episode)) - logger.log(u'Adding {0} {1} to Trakt watchlist'.format - (result.show.name, episode_num(curEpObj.season, curEpObj.episode)), logger.INFO) + log.info( + u'Adding {0} {1} to Trakt watchlist', + result.show.name, + episode_num(curEpObj.season, curEpObj.episode), + ) if trakt_data: data_episode = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data) if data_episode: - notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data_episode, update="add") + notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data_episode, update=u'add') if sql_l: main_db_con = db.DBConnection() @@ -231,7 +245,7 @@ def pick_best_result(results, show): # pylint: disable=too-many-branches """ results = results if isinstance(results, list) else [results] - logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG) + log.debug(u'Picking the best result out of {0}', [x.name for x in results]) best_result = None @@ -245,23 +259,27 @@ def pick_best_result(results, show): # pylint: disable=too-many-branches if not show.release_groups.is_valid(cur_result): continue - logger.log(u"Quality of " + cur_result.name + u" is " + Quality.qualityStrings[cur_result.quality]) + log.info(u'Quality of {0} is {1}', cur_result.name, Quality.qualityStrings[cur_result.quality]) allowed_qualities, preferred_qualities = show.current_qualities if cur_result.quality not in allowed_qualities + preferred_qualities: - logger.log(cur_result.name + u" is a quality we know we don't want, rejecting it", logger.DEBUG) + log.debug(u'{0} is an unwanted quality, rejecting it', cur_result.name) continue # If doesnt have min seeders OR min leechers then discard it if cur_result.seeders not in (-1, None) and cur_result.leechers not in (-1, None) \ - and hasattr(cur_result.provider, 'minseed') and hasattr(cur_result.provider, 'minleech') \ + and hasattr(cur_result.provider, u'minseed') and hasattr(cur_result.provider, u'minleech') \ and 
(int(cur_result.seeders) < int(cur_result.provider.minseed) or int(cur_result.leechers) < int(cur_result.provider.minleech)): - logger.log(u"Discarding torrent because it doesn't meet the minimum provider setting " - u"S:{0} L:{1}. Result has S:{2} L:{3}".format - (cur_result.provider.minseed, cur_result.provider.minleech, - cur_result.seeders, cur_result.leechers)) + log.info( + u'Discarding torrent because it does not meet the minimum provider setting ' + u'S:{0} L:{1}. Result has S:{2} L:{3}', + cur_result.provider.minseed, + cur_result.provider.minleech, + cur_result.seeders, + cur_result.leechers, + ) continue ignored_words = show.show_words().ignored_words @@ -270,29 +288,27 @@ def pick_best_result(results, show): # pylint: disable=too-many-branches found_required_word = show_name_helpers.containsAtLeastOneWord(cur_result.name, required_words) if ignored_words and found_ignored_word: - logger.log(u"Ignoring " + cur_result.name + u" based on ignored words filter: " + found_ignored_word, - logger.INFO) + log.info(u'Ignoring {0} based on ignored words filter: {1}', cur_result.name, found_ignored_word) continue if required_words and not found_required_word: - logger.log(u"Ignoring " + cur_result.name + u" based on required words filter: " + required_words, - logger.INFO) + log.info(u'Ignoring {0} based on required words filter: {1}', cur_result.name, required_words) continue if not show_name_helpers.filterBadReleases(cur_result.name, parse=False): continue - if hasattr(cur_result, 'size'): + if hasattr(cur_result, u'size'): if app.USE_FAILED_DOWNLOADS and failed_history.has_failed(cur_result.name, cur_result.size, cur_result.provider.name): - logger.log(cur_result.name + u" has previously failed, rejecting it") + log.info(u'{0} has previously failed, rejecting it', cur_result.name) continue preferred_words = '' if app.PREFERRED_WORDS: - preferred_words = app.PREFERRED_WORDS.lower().split(',') + preferred_words = app.PREFERRED_WORDS.lower().split(u',') undesired_words = '' if app.UNDESIRED_WORDS: - undesired_words = app.UNDESIRED_WORDS.lower().split(',') + undesired_words = app.UNDESIRED_WORDS.lower().split(u',') if not best_result: best_result = cur_result @@ -300,25 +316,25 @@ def pick_best_result(results, show): # pylint: disable=too-many-branches best_result = cur_result elif best_result.quality == cur_result.quality: if any(ext in cur_result.name.lower() for ext in preferred_words): - logger.log(u"Preferring " + cur_result.name + u" (preferred words)") + log.info(u'Preferring {0} (preferred words)', cur_result.name) best_result = cur_result if cur_result.proper_tags: - logger.log(u"Preferring " + cur_result.name + u" (repack/proper/real/rerip over nuked)") + log.info(u'Preferring {0} (repack/proper/real/rerip over nuked)', cur_result.name) best_result = cur_result - elif "internal" in best_result.name.lower() and "internal" not in cur_result.name.lower(): - logger.log(u"Preferring " + cur_result.name + u" (normal instead of internal)") + elif u'internal' in best_result.name.lower() and u'internal' not in cur_result.name.lower(): + log.info(u'Preferring {0} (normal instead of internal)', cur_result.name) best_result = cur_result - elif "xvid" in best_result.name.lower() and "x264" in cur_result.name.lower(): - logger.log(u"Preferring " + cur_result.name + u" (x264 over xvid)") + elif u'xvid' in best_result.name.lower() and u'x264' in cur_result.name.lower(): + log.info(u'Preferring {0} (x264 over xvid)', cur_result.name) best_result = cur_result if any(ext in best_result.name.lower() 
and ext not in cur_result.name.lower() for ext in undesired_words): - logger.log(u"Dont want this release " + cur_result.name + u" (contains undesired word(s))") + log.info(u'Unwanted release {0} (contains undesired word(s))', cur_result.name) best_result = cur_result if best_result: - logger.log(u"Picked " + best_result.name + u" as the best", logger.DEBUG) + log.debug(u'Picked {0} as the best', best_result.name) else: - logger.log(u"No result picked.", logger.DEBUG) + log.debug(u'No result picked.') return best_result @@ -330,8 +346,7 @@ def is_first_best_match(result): :param result: to check :return: True if the result is the best quality match else False """ - logger.log(u"Checking if we should stop searching for a better quality for for episode " + result.name, - logger.DEBUG) + log.debug(u'Checking if we should stop searching for a better quality for episode {0}', result.name) show_obj = result.episodes[0].show @@ -352,28 +367,33 @@ def wanted_episodes(show, from_date): allowed_qualities, preferred_qualities = show.current_qualities all_qualities = list(set(allowed_qualities + preferred_qualities)) - logger.log(u"Seeing if we need anything from " + show.name, logger.DEBUG) + log.debug(u'Seeing if we need anything from {0}', show.name) con = db.DBConnection() sql_results = con.select( - "SELECT status, season, episode, manually_searched " - "FROM tv_episodes " - "WHERE showid = ? AND season > 0 and airdate > ?", + 'SELECT status, season, episode, manually_searched ' + 'FROM tv_episodes ' + 'WHERE showid = ?' + ' AND season > 0' + ' and airdate > ?', [show.indexerid, from_date.toordinal()] ) # check through the list of statuses to see if we want any for result in sql_results: - _, cur_quality = common.Quality.split_composite_status(int(result['status'] or UNKNOWN)) - should_search, should_search_reason = Quality.should_search(result['status'], show, result['manually_searched']) + _, cur_quality = common.Quality.split_composite_status(int(result[b'status'] or UNKNOWN)) + should_search, should_search_reason = Quality.should_search(result[b'status'], show, result[b'manually_searched']) if not should_search: continue else: - logger.log(u'Searching for {show} {ep}. Reason: {reason}'.format - (show=show.name, ep=episode_num(result['season'], result['episode']), - reason=should_search_reason), logger.DEBUG) - - ep_obj = show.get_episode(result['season'], result['episode']) + log.debug( + u'Searching for {show} {ep}. 
Reason: {reason}', { + u'show': show.name, + u'ep': episode_num(result[b'season'], result[b'episode']), + u'reason': should_search_reason, + } + ) + ep_obj = show.get_episode(result[b'season'], result[b'episode']) ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality and i != common.Quality.UNKNOWN] wanted.append(ep_obj) @@ -396,37 +416,36 @@ def search_for_needed_episodes(): for cur_show in show_list: if cur_show.paused: - logger.log(u"Not checking for needed episodes of %s because the show is paused" % cur_show.name, - logger.DEBUG) + log.debug(u'Not checking for needed episodes of {0} because the show is paused', cur_show.name) continue episodes.extend(wanted_episodes(cur_show, from_date)) if not episodes: - # nothing wanted so early out, ie: avoid whatever abritrarily + # nothing wanted so early out, ie: avoid whatever arbitrarily # complex thing a provider cache update entails, for example, # reading rss feeds return found_results.values() original_thread_name = threading.currentThread().name - providers = enabled_providers('daily') - logger.log("Using daily search providers") + providers = enabled_providers(u'daily') + log.info(u'Using daily search providers') for cur_provider in providers: - threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, - provider=cur_provider.name) + threading.currentThread().name = u'{thread} :: [{provider}]'.format(thread=original_thread_name, + provider=cur_provider.name) cur_provider.cache.update_cache() for cur_provider in providers: - threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, - provider=cur_provider.name) + threading.currentThread().name = u'{thread} :: [{provider}]'.format(thread=original_thread_name, + provider=cur_provider.name) try: cur_found_results = cur_provider.search_rss(episodes) - except AuthException as e: - logger.log(u"Authentication error: " + ex(e), logger.ERROR) + except AuthException as error: + log.error(u'Authentication error: {0}', ex(error)) continue - except Exception as e: - logger.log(u"Error while searching " + cur_provider.name + u", skipping: " + ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.DEBUG) + except Exception as error: + log.debug(traceback.format_exc()) + log.error(u'Error while searching {0}, skipping: {1}', cur_provider.name, ex(error)) continue did_search = True @@ -434,14 +453,14 @@ def search_for_needed_episodes(): # pick a single result for each episode, respecting existing results for cur_ep in cur_found_results: if not cur_ep.show or cur_ep.show.paused: - logger.log(u"Skipping %s because the show is paused " % cur_ep.pretty_name(), logger.DEBUG) + log.debug(u'Skipping {0} because the show is paused ', cur_ep.pretty_name()) continue best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show) # if all results were rejected move on to the next episode if not best_result: - logger.log(u"All found results for " + cur_ep.pretty_name() + u" were rejected.", logger.DEBUG) + log.debug(u'All found results for {0} were rejected.', cur_ep.pretty_name()) continue # if it's already in the list (from another provider) and the newly found quality is no better then skip it @@ -453,15 +472,16 @@ def search_for_needed_episodes(): threading.currentThread().name = original_thread_name if not did_search: - logger.log( - u"No NZB/Torrent providers found or enabled in the application config for daily searches. 
" - u"Please check your settings.", logger.WARNING) + log.warning( + u'No NZB/Torrent providers found or enabled in the application config for daily searches. ' + u'Please check your settings.' + ) return found_results.values() def search_providers(show, episodes, forced_search=False, down_cur_quality=False, - manual_search=False, manual_search_type='episode'): + manual_search=False, manual_search_type=u'episode'): """ Walk providers for information on shows. @@ -485,21 +505,21 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False original_thread_name = threading.currentThread().name if manual_search: - logger.log("Using manual search providers") + log.info(u'Using manual search providers') providers = [x for x in sorted_provider_list(app.RANDOMIZE_PROVIDERS) if x.is_active() and x.enable_manualsearch] else: - logger.log("Using backlog search providers") + log.info(u'Using backlog search providers') providers = [x for x in sorted_provider_list(app.RANDOMIZE_PROVIDERS) if x.is_active() and x.enable_backlog] threading.currentThread().name = original_thread_name for cur_provider in providers: - threading.currentThread().name = original_thread_name + " :: [" + cur_provider.name + "]" + threading.currentThread().name = original_thread_name + u' :: [' + cur_provider.name + u']' if cur_provider.anime_only and not show.is_anime: - logger.log(str(show.name) + u" is not an anime, skipping", logger.DEBUG) + log.debug(u'{0} is not an anime, skipping', show.name) continue found_results[cur_provider.name] = {} @@ -508,54 +528,54 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False search_mode = cur_provider.search_mode # Always search for episode when manually searching when in sponly - if search_mode == 'sponly' and (forced_search or manual_search): - search_mode = 'eponly' + if search_mode == u'sponly' and (forced_search or manual_search): + search_mode = u'eponly' - if manual_search and manual_search_type == 'season': - search_mode = 'sponly' + if manual_search and manual_search_type == u'season': + search_mode = u'sponly' while True: search_count += 1 - if search_mode == 'eponly': - logger.log(u"Performing episode search for " + show.name) + if search_mode == u'eponly': + log.info(u'Performing episode search for {0}', show.name) else: - logger.log(u"Performing season pack search for " + show.name) + log.info(u'Performing season pack search for {0}', show.name) try: search_results = cur_provider.find_search_results(show, episodes, search_mode, forced_search, down_cur_quality, manual_search, manual_search_type) - except AuthException as e: - logger.log(u"Authentication error: " + ex(e), logger.ERROR) + except AuthException as error: + log.error(u'Authentication error: {0}', ex(error)) break - except socket_timeout as e: - logger.log(u"Connection timed out (sockets) while searching %s. Error: %r" % - (cur_provider.name, ex(e)), logger.DEBUG) + except socket_timeout as error: + log.debug(u'Connection timed out (sockets) while searching {0}. Error: {1!r}', + cur_provider.name, ex(error)) break - except (requests.exceptions.HTTPError, requests.exceptions.TooManyRedirects) as e: - logger.log(u"HTTP error while searching %s. Error: %r" % - (cur_provider.name, ex(e)), logger.DEBUG) + except (requests.exceptions.HTTPError, requests.exceptions.TooManyRedirects) as error: + log.debug(u'HTTP error while searching {0}. 
Error: {1!r}', + cur_provider.name, ex(error)) break - except requests.exceptions.ConnectionError as e: - logger.log(u"Connection error while searching %s. Error: %r" % - (cur_provider.name, ex(e)), logger.DEBUG) + except requests.exceptions.ConnectionError as error: + log.debug(u'Connection error while searching {0}. Error: {1!r}', + cur_provider.name, ex(error)) break - except requests.exceptions.Timeout as e: - logger.log(u"Connection timed out while searching %s. Error: %r" % - (cur_provider.name, ex(e)), logger.DEBUG) + except requests.exceptions.Timeout as error: + log.debug(u'Connection timed out while searching {0}. Error: {1!r}', + cur_provider.name, ex(error)) break - except requests.exceptions.ContentDecodingError as e: - logger.log(u"Content-Encoding was gzip, but content was not compressed while searching %s. " - u"Error: %r" % (cur_provider.name, ex(e)), logger.DEBUG) + except requests.exceptions.ContentDecodingError as error: + log.debug(u'Content-Encoding was gzip, but content was not compressed while searching {0}.' + u' Error: {1!r}', cur_provider.name, ex(error)) break - except Exception as e: - if 'ECONNRESET' in e or (hasattr(e, 'errno') and e.errno == errno.ECONNRESET): - logger.log(u"Connection reseted by peer while searching %s. Error: %r" % - (cur_provider.name, ex(e)), logger.WARNING) + except Exception as error: + if u'ECONNRESET' in error or (hasattr(error, u'errno') and error.errno == errno.ECONNRESET): + log.warning(u'Connection reset by peer while searching {0}. Error: {1!r}', + cur_provider.name, ex(error)) else: - logger.log(u"Unknown exception while searching %s. Error: %r" % - (cur_provider.name, ex(e)), logger.ERROR) - logger.log(traceback.format_exc(), logger.DEBUG) + log.debug(traceback.format_exc()) + log.error(u'Unknown exception while searching {0}. 
Error: {1!r}', + cur_provider.name, ex(error)) break did_search = True @@ -569,32 +589,32 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False found_results[cur_provider.name][cur_ep] = search_results[cur_ep] # Sort the list by seeders if possible - if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab', None): + if cur_provider.provider_type == u'torrent' or getattr(cur_provider, u'torznab', None): found_results[cur_provider.name][cur_ep].sort(key=lambda d: int(d.seeders), reverse=True) break elif not cur_provider.search_fallback or search_count == 2: break - # Dont fallback when doing manual season search - if manual_search_type == 'season': + # Don't fallback when doing manual season search + if manual_search_type == u'season': break - if search_mode == 'sponly': - logger.log(u"Fallback episode search initiated", logger.DEBUG) - search_mode = 'eponly' + if search_mode == u'sponly': + log.debug(u'Fallback episode search initiated') + search_mode = u'eponly' else: - logger.log(u"Fallback season pack search initiate", logger.DEBUG) - search_mode = 'sponly' + log.debug(u'Fallback season pack search initiated') + search_mode = u'sponly' # skip to next provider if we have no results to process if not found_results[cur_provider.name]: continue - # Update the cache if a manual search is being runned + # Update the cache if a manual search is being run if manual_search: # Let's create a list with episodes that we where looking for - if manual_search_type == 'season': + if manual_search_type == u'season': # If season search type, we only want season packs searched_episode_list = [SEASON_RESULT] else: @@ -617,8 +637,7 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False for cur_result in found_results[cur_provider.name][cur_episode]: if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality - logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], - logger.DEBUG) + log.debug(u'The highest quality of any match is {0}', Quality.qualityStrings[highest_quality_overall]) # see if every episode is wanted if best_season_result: @@ -626,18 +645,19 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False # get the quality of the season nzb season_quality = best_season_result.quality - logger.log( - u"The quality of the season " + best_season_result.provider.provider_type + " is " + - Quality.qualityStrings[season_quality], logger.DEBUG) - + log.debug(u'The quality of the season {0} is {1}', + best_season_result.provider.provider_type, + Quality.qualityStrings[season_quality]) main_db_con = db.DBConnection() - all_eps = [int(x["episode"]) - for x in main_db_con.select("SELECT episode FROM tv_episodes WHERE showid = ? AND " - "( season IN ( " + ','.join(searched_seasons) + " ) )", [show.indexerid])] - - logger.log(u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = %s AND season in %s]" % - (show.indexerid, ','.join(searched_seasons))) - logger.log(u"Episode list: " + str(all_eps), logger.DEBUG) + selection = main_db_con.select( + 'SELECT episode ' + 'FROM tv_episodes ' + 'WHERE showid = ?' 
+ ' AND ( season IN ( {0} ) )'.format(','.join(searched_seasons)), + [show.indexerid] + ) + all_eps = [int(x[b'episode']) for x in selection] + log.debug(u'Episode list: {0}', all_eps) all_wanted = True any_wanted = False @@ -651,9 +671,9 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False # if we need every ep in the season and there's nothing better then # just download this and be done with it (unless single episodes are preferred) if all_wanted and best_season_result.quality == highest_quality_overall: - logger.log( - u"Every ep in this season is needed, downloading the whole " + - best_season_result.provider.provider_type + " " + best_season_result.name) + log.info(u'All episodes in this season are needed, downloading {0} {1}', + best_season_result.provider.provider_type, + best_season_result.name) ep_objs = [] for cur_ep_num in all_eps: for season in {x.season for x in episodes}: @@ -666,14 +686,12 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False return [best_season_result] elif not any_wanted: - logger.log( - u"No eps from this season are wanted at this quality, ignoring the result of " + - best_season_result.name, logger.DEBUG) - + log.debug(u'No episodes in this season are needed at this quality, ignoring {0} {1}', + best_season_result.provider.provider_type, + best_season_result.name) else: - if best_season_result.provider.provider_type == GenericProvider.NZB: - logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG) + log.debug(u'Breaking apart the NZB and adding the individual ones to our results') # if not, break it apart and add them as the lowest priority results individual_results = nzb_splitter.split_result(best_season_result) @@ -691,12 +709,10 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False # If this is a torrent all we can do is leech the entire torrent, # user will have to select which eps not do download in his torrent client else: - # Season result from Torrent Provider must be a full-season torrent, # creating multi-ep result for it. - logger.log( - u"Adding multi-ep result for full-season torrent. " - u"Set the episodes you don't want to 'don't download' in your torrent client if desired!") + log.info(u'Adding multi-ep result for full-season torrent.' 
+ u' Undesired episodes can be skipped in torrent client if desired!') ep_objs = [] for cur_ep_num in all_eps: for season in {x.season for x in episodes}: @@ -712,9 +728,7 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False multi_results = {} if MULTI_EP_RESULT in found_results[cur_provider.name]: for _multi_result in found_results[cur_provider.name][MULTI_EP_RESULT]: - - logger.log(u"Seeing if we want to bother with multi-episode result " + - _multi_result.name, logger.DEBUG) + log.debug(u'Seeing if we want to bother with multi-episode result {0}', _multi_result.name) # Filter result by ignore/required/whitelist/blacklist/quality, etc multi_result = pick_best_result(_multi_result, show) @@ -732,13 +746,12 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False else: needed_eps.append(ep_obj.episode) - logger.log( - u"Single-ep check result is needed_eps: " + str(needed_eps) + u", not_needed_eps: " + - str(not_needed_eps), logger.DEBUG) + log.debug(u'Single-ep check result is needed_eps: {0}, not_needed_eps: {1}', + needed_eps, not_needed_eps) if not needed_eps: - logger.log(u"All of these episodes were covered by single episode results, " - u"ignoring this multi-episode result", logger.DEBUG) + log.debug(u'All of these episodes were covered by single episode results,' + u' ignoring this multi-episode result') continue # check if these eps are already covered by another multi-result @@ -750,22 +763,26 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False else: multi_needed_eps.append(ep_obj.episode) - logger.log(u"Multi-ep check result is multi_needed_eps: " + str(multi_needed_eps) + - u", multi_not_needed_eps: " + str(multi_not_needed_eps), logger.DEBUG) + log.debug(u'Multi-ep check result is multi_needed_eps: {0}, multi_not_needed_eps: {1}', + multi_needed_eps, + multi_not_needed_eps) if not multi_needed_eps: - logger.log( - u"All of these episodes were covered by another multi-episode nzbs, " - u"ignoring this multi-ep result", logger.DEBUG) + log.debug( + u'All of these episodes were covered by another multi-episode nzb, ' + u'ignoring this multi-ep result' + ) continue # don't bother with the single result if we're going to get it with a multi result for ep_obj in multi_result.episodes: multi_results[ep_obj.episode] = multi_result if ep_obj.episode in found_results[cur_provider.name]: - logger.log( - u"A needed multi-episode result overlaps with a single-episode result for ep #" + - str(ep_obj.episode) + u", removing the single-episode results from the list", logger.DEBUG) + log.debug( + u'A needed multi-episode result overlaps with a single-episode result for episode {0},' + u' removing the single-episode results from the list', + ep_obj.episode, + ) del found_results[cur_provider.name][ep_obj.episode] # of all the single ep results narrow it down to the best one for each episode @@ -795,8 +812,8 @@ def search_providers(show, episodes, forced_search=False, down_cur_quality=False final_results += [best_result] if not did_search: - logger.log(u"No NZB/Torrent providers found or enabled in the application config for backlog searches. " - u"Please check your settings.", logger.WARNING) + log.warning(u'No NZB/Torrent providers found or enabled in the application config for backlog searches.' 
+ u' Please check your settings.') # Remove provider from thread name before return results threading.currentThread().name = original_thread_name diff --git a/medusa/search/daily.py b/medusa/search/daily.py index 1af23c9e12..cf60cadab9 100644 --- a/medusa/search/daily.py +++ b/medusa/search/daily.py @@ -1,54 +1,50 @@ # coding=utf-8 -# Author: Nic Wolfe - -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + +"""Daily searcher module.""" from __future__ import unicode_literals +import logging import threading from datetime import date, datetime, timedelta -from .queue import DailySearchQueueItem -from .. import app, common, logger -from ..db import DBConnection -from ..helper.common import try_int -from ..helper.exceptions import MultipleShowObjectsException -from ..network_timezones import app_timezone, network_dict, parse_date_time, update_network_dict -from ..show.show import Show +from medusa import app, common +from medusa.db import DBConnection +from medusa.helper.common import try_int +from medusa.helper.exceptions import MultipleShowObjectsException +from medusa.logger.adapters.style import BraceAdapter +from medusa.network_timezones import ( + app_timezone, + network_dict, + parse_date_time, + update_network_dict, +) +from medusa.search.queue import DailySearchQueueItem +from medusa.show.show import Show + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) class DailySearcher(object): # pylint:disable=too-few-public-methods """Daily search class.""" def __init__(self): + """Initialize the class.""" self.lock = threading.Lock() self.amActive = False def run(self, force=False): # pylint:disable=too-many-branches """ - Runs the daily searcher, queuing selected episodes for search + Run the daily searcher, queuing selected episodes for search. :param force: Force search """ if self.amActive: - logger.log('Daily search is still running, not starting it again', logger.DEBUG) + log.debug('Daily search is still running, not starting it again') return elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force: - logger.log('Manual search is running. Can\'t start Daily search', logger.WARNING) + log.warning('Manual search is running. 
Unable to start Daily search') return self.amActive = True @@ -73,8 +69,8 @@ def run(self, force=False): # pylint:disable=too-many-branches show = None for db_episode in episodes_from_db: + show_id = int(db_episode[b'showid']) try: - show_id = int(db_episode[b'showid']) if not show or show_id != show.indexerid: show = Show.find(app.showList, show_id) @@ -83,7 +79,8 @@ def run(self, force=False): # pylint:disable=too-many-branches continue except MultipleShowObjectsException: - logger.log('ERROR: expected to find a single show matching {id}'.format(id=show_id)) + log.info('ERROR: expected to find a single show matching {id}', + {'id': show_id}) continue if show.airs and show.network: @@ -98,11 +95,13 @@ def run(self, force=False): # pylint:disable=too-many-branches cur_ep = show.get_episode(db_episode[b'season'], db_episode[b'episode']) with cur_ep.lock: cur_ep.status = show.default_ep_status if cur_ep.season else common.SKIPPED - logger.log('Setting status ({status}) for show airing today: {name} {special}'.format( - name=cur_ep.pretty_name(), - status=common.statusStrings[cur_ep.status], - special='(specials are not supported)' if not cur_ep.season else '' - )) + log.info( + 'Setting status ({status}) for show airing today: {name} {special}', { + 'name': cur_ep.pretty_name(), + 'status': common.statusStrings[cur_ep.status], + 'special': '(specials are not supported)' if not cur_ep.season else '', + } + ) new_releases.append(cur_ep.get_sql()) if new_releases: diff --git a/medusa/search/manual.py b/medusa/search/manual.py index c8d3cbe363..8f8d020226 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -16,14 +16,16 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . - +"""Manual search module.""" import json import threading import time from datetime import datetime + from dateutil import parser + from .queue import FORCED_SEARCH_HISTORY, ForcedSearchQueueItem from .. import app, db, logger from ..common import Overview, Quality, cpu_presets, statusStrings @@ -38,10 +40,7 @@ def get_quality_class(ep_obj): - """ - Find the quality class for the episode - """ - + """Find the quality class for the episode.""" _, ep_quality = Quality.split_composite_status(ep_obj.status) if ep_quality in Quality.cssClassStrings: quality_class = Quality.cssClassStrings[ep_quality] @@ -52,7 +51,7 @@ def get_quality_class(ep_obj): def get_episode(show, season=None, episode=None, absolute=None): - """ Get a specific episode object based on show, season and episode number + """Get a specific episode object based on show, season and episode number. :param show: Season number :param season: Season number @@ -82,8 +81,7 @@ def get_episode(show, season=None, episode=None, absolute=None): def get_episodes(search_thread, searchstatus): - """ Get all episodes located in a search thread with a specific status """ - + """Get all episodes located in a search thread with a specific status.""" results = [] # NOTE!: Show.find called with just indexerid! show_obj = Show.find(app.showList, int(search_thread.show.indexerid)) @@ -115,8 +113,8 @@ def get_episodes(search_thread, searchstatus): def update_finished_search_queue_item(snatch_queue_item): - """ - Updates the previous manual searched queue item with the correct status + """Update the previous manual searched queue item with the correct status. + @param snatch_queue_item: A successful snatch queue item, send from pickManualSearch(). @return: True if status update was successful, False if not. 
""" @@ -140,9 +138,9 @@ def update_finished_search_queue_item(snatch_queue_item): def collect_episodes_from_search_thread(show): - """ - Collects all episodes from from the forced_search_queue_scheduler - and looks for episodes that are in status queued or searching. + """Collect all episodes from from the forced_search_queue_scheduler. + + And looks for episodes that are in status queued or searching. If episodes are found in FORCED_SEARCH_HISTORY, these are set to status finished. """ episodes = [] @@ -180,10 +178,7 @@ def collect_episodes_from_search_thread(show): def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show): # pylint: disable=too-many-locals,unused-argument - """ - Check all provider cache tables for search results - """ - + """Check all provider cache tables for search results.""" show = search_show.get('show') season = search_show.get('season') episode = search_show.get('episode') diff --git a/medusa/search/proper.py b/medusa/search/proper.py index 6f74aa3d78..f6859e4b89 100644 --- a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ -47,6 +47,7 @@ def __init__(self): """Initialize the class.""" self.amActive = False self.processed_propers = [] + self.ignore_processed_propers = False def run(self, force=False): # pylint: disable=unused-argument """ @@ -67,9 +68,10 @@ def run(self, force=False): # pylint: disable=unused-argument self.amActive = True # If force we should ignore existing processed propers + self.ignore_processed_propers = False if force: - current_processed_propers = self.processed_propers - self.processed_propers = [] + self.ignore_processed_propers = True + logger.log("Ignoring already processed propers as it's a forced search", logger.DEBUG) logger.log('Using proper search days: {0}'.format(app.PROPERS_SEARCH_DAYS)) @@ -82,17 +84,13 @@ def run(self, force=False): # pylint: disable=unused-argument run_at = '' if None is app.proper_finder_scheduler.start_time: - run_in = app.proper_finder_scheduler.lastRun + app.proper_finder_scheduler.cycleTime - datetime.datetime.now() + run_in = app.proper_finder_scheduler.lastRun + \ + app.proper_finder_scheduler.cycleTime - datetime.datetime.now() hours, remainder = divmod(run_in.seconds, 3600) minutes, seconds = divmod(remainder, 60) run_at = ', next check in approx. 
{0}'.format( '{0}h, {1}m'.format(hours, minutes) if 0 < hours else '{0}m, {1}s'.format(minutes, seconds)) - # Restore processed propers and add new ones to the end of the list - if force: - current_processed_propers.extend(set(self.processed_propers).difference(set(current_processed_propers))) - self.processed_propers = current_processed_propers - logger.log('Completed the search for new propers{0}'.format(run_at)) self.amActive = False @@ -130,7 +128,8 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran # Loop through the providers, and search for releases for cur_provider in providers: - threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name) + threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, + provider=cur_provider.name) logger.log('Searching for any new PROPER releases from {provider}'.format (provider=cur_provider.name)) @@ -158,12 +157,14 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran (provider=cur_provider.name, error=ex(e)), logger.DEBUG) continue except requests_exceptions.ContentDecodingError as e: - logger.log('Content-Encoding was gzip, but content was not compressed while searching for propers in {provider}, skipping: {error}'.format + logger.log('Content-Encoding was gzip, but content was not compressed while' + ' searching for propers in {provider}, skipping: {error}'.format (provider=cur_provider.name, error=ex(e)), logger.DEBUG) continue except Exception as e: - if 'ECONNRESET' in e or (hasattr(e, 'errno') and e.errno == errno.ECONNRESET): - logger.log('Connection reset by peer while searching for propers in {provider}, skipping: {error}'.format + if 'ECONNRESET' in e or getattr(e, 'errno', None) == errno.ECONNRESET: + logger.log('Connection reset by peer while searching for propers in {provider}. ' + 'Skipping: {error}'.format (provider=cur_provider.name, error=ex(e)), logger.DEBUG) else: logger.log('Unknown exception while searching for propers in {provider}, skipping: {error}'.format @@ -186,12 +187,16 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) final_propers = [] - # Keep only last 100 items of processed propers: - self.processed_propers = self.processed_propers[-100:] + # Keep only items from last PROPERS_SEARCH_DAYS setting in processed propers: + latest_proper = datetime.datetime.now() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS) + self.processed_propers = [p for p in self.processed_propers if p.get('date') >= latest_proper] + + # Get proper names from processed propers + processed_propers_names = [proper.get('name') for proper in self.processed_propers if proper.get('name')] for cur_proper in sorted_propers: - if cur_proper.name in self.processed_propers: + if not self.ignore_processed_propers and cur_proper.name in processed_propers_names: logger.log(u'Proper already processed. 
Skipping: {0}'.format(cur_proper.name), logger.DEBUG) continue @@ -211,13 +216,15 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran if not parse_result.series_name: logger.log('Ignoring invalid show: {name}'.format (name=cur_proper.name), logger.DEBUG) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue if not parse_result.episode_numbers: logger.log('Ignoring full season instead of episode: {name}'.format (name=cur_proper.name), logger.DEBUG) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue logger.log('Successful match! Matched {original_name} to show {new_name}'.format @@ -244,7 +251,8 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran if not best_result: logger.log('Rejected proper due to release filters: {name}'.format (name=cur_proper.name)) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue # only get anime proper if it has release group and version @@ -252,7 +260,8 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran if not best_result.release_group and best_result.version == -1: logger.log('Ignoring proper without release group and version: {name}'.format (name=best_result.name)) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue # check if we have the episode as DOWNLOADED @@ -269,7 +278,8 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran _, old_quality = Quality.split_composite_status(int(sql_results[0][b'status'])) if old_quality != best_result.quality: logger.log('Ignoring proper because quality is different: {name}'.format(name=best_result.name)) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue # only keep the proper if we have already downloaded an episode with the same codec @@ -279,7 +289,8 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran # Ignore proper if codec differs from downloaded release codec if all([current_codec, parse_result.video_codec, parse_result.video_codec != current_codec]): logger.log('Ignoring proper because codec is different: {name}'.format(name=best_result.name)) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue else: logger.log("Coudn't find a release name in database. 
Skipping codec comparison for: {name}".format @@ -301,22 +312,26 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran else: logger.log('Ignoring proper with the same or lower version: {name}'.format (name=best_result.name)) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue if old_release_group != best_result.release_group: logger.log('Ignoring proper from release group {new} instead of current group {old}'.format (new=best_result.release_group, old=old_release_group)) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) continue - # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers - if best_result.indexerid != -1 and (best_result.indexerid, best_result.season, best_result.episode) not in map( - operator.attrgetter('indexerid', 'season', 'episode'), final_propers): + # if the show is in our list and there hasn't been a proper already added for that particular episode + # then add it to our list of propers + if best_result.indexerid != -1 and (best_result.indexerid, best_result.season, best_result.episode) not in \ + map(operator.attrgetter('indexerid', 'season', 'episode'), final_propers): logger.log('Found a desired proper: {name}'.format(name=best_result.name)) final_propers.append(best_result) - self.processed_propers.append(cur_proper.name) + if cur_proper.name not in processed_propers_names: + self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date}) return final_propers @@ -393,8 +408,7 @@ def _sanitize_name(name): @staticmethod def _set_last_proper_search(when): - """ - Record last propersearch in DB. + """Record last propersearch in DB. :param when: When was the last proper search """ diff --git a/medusa/search/queue.py b/medusa/search/queue.py index 4f57abe5a8..d3c9a3e879 100644 --- a/medusa/search/queue.py +++ b/medusa/search/queue.py @@ -1,35 +1,26 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + +"""Module with different types of Queue Items for searching and snatching.""" from __future__ import unicode_literals +import logging import threading import time import traceback -from .. 
import app, common, failed_history, generic_queue, history, logger, providers, ui -from ..helpers import pretty_file_size -from ..search.core import ( +from medusa import app, common, failed_history, generic_queue, history, providers, ui +from medusa.helpers import pretty_file_size +from medusa.logger.adapters.style import BraceAdapter +from medusa.search.core import ( search_for_needed_episodes, search_providers, snatch_episode, ) +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + search_queue_lock = threading.Lock() BACKLOG_SEARCH = 10 @@ -43,12 +34,16 @@ class SearchQueue(generic_queue.GenericQueue): + """Search queue class.""" + def __init__(self): + """Initialize the class.""" generic_queue.GenericQueue.__init__(self) - self.queue_name = "SEARCHQUEUE" + self.queue_name = 'SEARCHQUEUE' self.force = False def is_in_queue(self, show, segment): + """Check if item is in queue.""" for cur_item in self.queue: if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem, ForcedSearchQueueItem, ManualSnatchQueueItem)) \ and cur_item.show == show and cur_item.segment == segment: return True @@ -57,28 +52,34 @@ def is_in_queue(self, show, segment): return False def pause_backlog(self): + """Pause the backlog.""" self.min_priority = generic_queue.QueuePriorities.HIGH def unpause_backlog(self): + """Unpause the backlog.""" self.min_priority = 0 def is_backlog_paused(self): + """Check if backlog is paused.""" # backlog priorities are NORMAL, this should be done properly somewhere return self.min_priority >= generic_queue.QueuePriorities.NORMAL def is_backlog_in_progress(self): + """Check if backlog is in progress.""" for cur_item in self.queue + [self.currentItem]: if isinstance(cur_item, BacklogQueueItem): return True return False def is_dailysearch_in_progress(self): + """Check if daily search is in progress.""" for cur_item in self.queue + [self.currentItem]: if isinstance(cur_item, DailySearchQueueItem): return True return False def queue_length(self): + """Get queue length.""" length = {'backlog': 0, 'daily': 0} for cur_item in self.queue: if isinstance(cur_item, DailySearchQueueItem): @@ -88,6 +89,7 @@ def queue_length(self): return length def add_item(self, item): + """Add item to queue.""" if isinstance(item, DailySearchQueueItem): # daily searches generic_queue.GenericQueue.add_item(self, item) @@ -96,9 +98,10 @@ def add_item(self, item): and not self.is_in_queue(item.show, item.segment): generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + log.debug('Item already in the queue, skipping') def force_daily(self): + """Force daily search.""" if not self.is_dailysearch_in_progress and not self.currentItem.amActive: self.force = True return True @@ -106,26 +109,22 @@ class ForcedSearchQueue(generic_queue.GenericQueue): - """Search Queueu used for Forced Search, Failed Search and """ + """Search Queue used for Forced Search, Failed Search.""" + def __init__(self): """Initialize ForcedSearch Queue.""" generic_queue.GenericQueue.__init__(self) - self.queue_name = "SEARCHQUEUE" + self.queue_name = 'SEARCHQUEUE' def is_in_queue(self, show, segment): - """ - Verify if the show and segment (episode or number of episodes) are scheduled. 
- """ + """Verify if the show and segment (episode or number of episodes) are scheduled.""" for cur_item in self.queue: if cur_item.show == show and cur_item.segment == segment: return True return False def is_ep_in_queue(self, segment): - """ - Verify if the show and segment (episode or number of episodes) are scheduled in a - ForcedSearchQueueItem or FailedQueueItem. - """ + """Verify if the show and segment (episode or number of episodes) are scheduled.""" for cur_item in self.queue: if isinstance(cur_item, (ForcedSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment: return True @@ -141,6 +140,7 @@ def is_show_in_queue(self, show): def get_all_ep_from_queue(self, show): """ Get QueueItems from the queue if the queue item is scheduled to search for the passed Show. + @param show: Show indexer_id @return: A list of ForcedSearchQueueItem or FailedQueueItem items @@ -155,19 +155,21 @@ def get_all_ep_from_queue(self, show): def is_backlog_paused(self): """ - Verify if the ForcedSearchQueue's min_priority has been changed. This indicates that the - queue has been paused. + Verify if the ForcedSearchQueue's min_priority has been changed. + + This indicates that the queue has been paused. # backlog priorities are NORMAL, this should be done properly somewhere """ return self.min_priority >= generic_queue.QueuePriorities.NORMAL def is_forced_search_in_progress(self): - """Tests of a forced search is currently running, it doesn't check what's in queue.""" + """Test of a forced search is currently running, it doesn't check what's in queue.""" if isinstance(self.currentItem, (ForcedSearchQueueItem, FailedQueueItem)): return True return False def queue_length(self): + """Get queue length.""" length = {'forced_search': 0, 'manual_search': 0, 'failed': 0} for cur_item in self.queue: if isinstance(cur_item, FailedQueueItem): @@ -184,18 +186,21 @@ def add_item(self, item): # manual, snatch and failed searches generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + log.debug('Item already in the queue, skipping') class SnatchQueue(generic_queue.GenericQueue): - """Queue for queuing ManualSnatchQueueItem objects (snatch jobs)""" + """Queue for queuing ManualSnatchQueueItem objects (snatch jobs).""" + def __init__(self): """Initialize the SnatchQueue object.""" generic_queue.GenericQueue.__init__(self) - self.queue_name = "SNATCHQUEUE" + self.queue_name = 'SNATCHQUEUE' def is_in_queue(self, show, segment): - """Check if the passed show and segment (episode of list of episodes) is in the queue + """ + Check if the passed show and segment (episode of list of episodes) is in the queue. + @param show: show object @param segment: list of episode objects @@ -207,7 +212,9 @@ def is_in_queue(self, show, segment): return False def is_ep_in_queue(self, segment): - """Check if the passed segment (episode of list of episodes) is in the queue + """ + Check if the passed segment (episode of list of episodes) is in the queue. + @param segment: list of episode objects @return: True or False @@ -218,52 +225,69 @@ def is_ep_in_queue(self, segment): return False def queue_length(self): - """Get the length of the current queue + """ + Get the length of the current queue. + @return: length of queue """ return {'manual_snatch': len(self.queue)} def add_item(self, item): - """Add a ManualSnatchQueueItem queue item + """ + Add a ManualSnatchQueueItem queue item. 
+ @param item: ManualSnatchQueueItem queue object """ if not self.is_in_queue(item.show, item.segment): # backlog searches generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + log.debug("Not adding item, it's already in the queue") class DailySearchQueueItem(generic_queue.QueueItem): + """Daily search queue item class.""" + def __init__(self): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Daily Search', DAILY_SEARCH) self.success = None self.started = None def run(self): - """ - Run daily search thread - """ + """Run daily search thread.""" generic_queue.QueueItem.run(self) self.started = True try: - logger.log(u"Beginning daily search for new episodes") + log.info('Beginning daily search for new episodes') found_results = search_for_needed_episodes() if not found_results: - logger.log(u"No needed episodes found") + log.info('No needed episodes found') else: for result in found_results: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (result.name, result.seeders, result.leechers, pretty_file_size(result.size), - result.provider.name)) + log.info( + 'Downloading {name} with {seeders} seeders and {leechers} leechers' + ' and size {size} from {provider}', { + 'name': result.name, + 'seeders': result.seeders, + 'leechers': result.leechers, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format - (result.name, pretty_file_size(result.size), result.provider.name)) + log.info( + 'Downloading {name} with size: {size} from {provider}', { + 'name': result.name, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) self.success = snatch_episode(result) # give the CPU a break @@ -271,7 +295,7 @@ def run(self): except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + log.debug(traceback.format_exc()) if self.success is None: self.success = False @@ -280,8 +304,12 @@ def run(self): class ForcedSearchQueueItem(generic_queue.QueueItem): + """Forced search queue item class.""" + def __init__(self, show, segment, down_cur_quality=False, manual_search=False, manual_search_type='episode'): - """A Queueitem used to queue Forced Searches and Manual Searches + """ + A QueueItem used to queue Forced Searches and Manual Searches. + + @param show: A show object @param segment: A list of episode objects. Needs to be passed as list! @param down_cur_quality: Not sure what it's used for. Maybe legacy. @@ -295,7 +323,10 @@ def __init__(self, show, segment, down_cur_quality=False, manual_search=False, m self.priority = generic_queue.QueuePriorities.HIGH # SEARCHQUEUE-MANUAL-12345 # SEARCHQUEUE-FORCED-12345 - self.name = '{0}-{1}'.format(('FORCED', 'MANUAL')[bool(manual_search)], show.indexerid) + self.name = '{search_type}-{indexerid}'.format( + search_type=('FORCED', 'MANUAL')[bool(manual_search)], + indexerid=show.indexerid + ) self.success = None self.started = None @@ -313,10 +344,13 @@ def run(self): self.started = True try: - logger.log(u'Beginning {0} {1}search for: [{2}]'. 
- format(('forced', 'manual')[bool(self.manual_search)], - ('', 'season pack ')[bool(self.manual_search_type == 'season')], - self.segment[0].pretty_name())) + log.info( + 'Beginning {search_type} {season_pack}search for: {ep}', { + 'search_type': ('forced', 'manual')[bool(self.manual_search)], + 'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')], + 'ep': self.segment[0].pretty_name() + } + ) search_result = search_providers(self.show, self.segment, True, self.down_cur_quality, self.manual_search, self.manual_search_type) @@ -325,13 +359,24 @@ def run(self): for result in search_result: # Just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u'Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}'.format - (result.name, result.seeders, result.leechers, - pretty_file_size(result.size), result.provider.name)) + log.info( + 'Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', { + 'name': result.name, + 'seeders': result.seeders, + 'leechers': result.leechers, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) else: - logger.log(u'Downloading {0} with size: {1} from {2}'.format - (result.name, pretty_file_size(result.size), - result.provider.name)) + log.info( + 'Downloading {name} with size: {size} from {provider}', { + 'name': result.name, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) self.success = snatch_episode(result) # Give the CPU a break @@ -342,22 +387,28 @@ def run(self): self.success = True if self.manual_search_type == 'season': - ui.notifications.message('We have found season packs for {0}'.format(self.show.name), + ui.notifications.message('We have found season packs for {show_name}' + .format(show_name=self.show.name), 'These should become visible in the manual select page.') else: - ui.notifications.message('We have found results for {0}'.format(self.segment[0].pretty_name()), + ui.notifications.message('We have found results for {ep}' + .format(ep=self.segment[0].pretty_name()), 'These should become visible in the manual select page.') else: ui.notifications.message('No results were found') - logger.log(u'Unable to find {0} {1}results for: [{2}]'. - format(('forced', 'manual')[bool(self.manual_search)], - ('', 'season pack ')[bool(self.manual_search_type == 'season')], - self.segment[0].pretty_name())) - + log.info( + 'Unable to find {search_type} {season_pack}results for: {ep}', { + 'search_type': ('forced', 'manual')[bool(self.manual_search)], + 'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')], + 'ep': self.segment[0].pretty_name() + } + ) + + # TODO: Remove catch all exception. except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + log.debug(traceback.format_exc()) # Keep a list with the 100 last executed searches fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) @@ -371,6 +422,7 @@ def run(self): class ManualSnatchQueueItem(generic_queue.QueueItem): """ A queue item that can be used to queue the snatch of a search result. + Currently used for the snatchSelection feature. @param show: A show object @@ -380,7 +432,9 @@ class ManualSnatchQueueItem(generic_queue.QueueItem): @return: The run() methods snatches the episode(s) if possible. 
""" + def __init__(self, show, segment, provider, cached_result): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Manual Search', MANUAL_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH self.name = 'MANUALSNATCH-' + str(show.indexerid) @@ -393,48 +447,61 @@ def __init__(self, show, segment, provider, cached_result): self.cached_result = cached_result def run(self): - """ - Run manual snatch job - """ + """Run manual snatch job.""" generic_queue.QueueItem.run(self) self.started = True - search_result = providers.get_provider_class(self.provider).get_result(self.segment) - search_result.show = self.show - search_result.url = self.cached_result[b'url'] - search_result.quality = int(self.cached_result[b'quality']) - search_result.name = self.cached_result[b'name'] - search_result.size = int(self.cached_result[b'size']) - search_result.seeders = int(self.cached_result[b'seeders']) - search_result.leechers = int(self.cached_result[b'leechers']) - search_result.release_group = self.cached_result[b'release_group'] - search_result.version = int(self.cached_result[b'version']) - search_result.proper_tags = self.cached_result[b'proper_tags'].split('|') if self.cached_result[b'proper_tags'] else '' - search_result.manually_searched = True + result = providers.get_provider_class(self.provider).get_result(self.segment) + result.show = self.show + result.url = self.cached_result[b'url'] + result.quality = int(self.cached_result[b'quality']) + result.name = self.cached_result[b'name'] + result.size = int(self.cached_result[b'size']) + result.seeders = int(self.cached_result[b'seeders']) + result.leechers = int(self.cached_result[b'leechers']) + result.release_group = self.cached_result[b'release_group'] + result.version = int(self.cached_result[b'version']) + result.proper_tags = self.cached_result[b'proper_tags'].split('|') \ + if self.cached_result[b'proper_tags'] else '' + result.manually_searched = True try: - logger.log(u"Beginning to manual snatch release: {0}".format(search_result.name)) - - if search_result: - if search_result.seeders not in (-1, None) and search_result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (search_result.name, search_result.seeders, search_result.leechers, - pretty_file_size(search_result.size), search_result.provider.name)) + log.info('Beginning to manual snatch release: {name}', + {'name': result.name}) + + if result: + if result.seeders not in (-1, None) and result.leechers not in (-1, None): + log.info( + 'Downloading {name} with {seeders} seeders and {leechers} leechers' + ' and size {size} from {provider}', { + 'name': result.name, + 'seeders': result.seeders, + 'leechers': result.leechers, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format - (search_result.name, pretty_file_size(search_result.size), search_result.provider.name)) - self.success = snatch_episode(search_result) + log.info( + 'Downloading {name} with size: {size} from {provider}', { + 'name': result.name, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) + self.success = snatch_episode(result) else: - logger.log(u"Unable to snatch release: {0}".format(search_result.name)) + log.info('Unable to snatch release: {name}', + {'name': result.name}) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) except Exception: self.success = False - 
logger.log(traceback.format_exc(), logger.DEBUG) + log.debug(traceback.format_exc()) ui.notifications.message('Error while snatching selected result', - "Couldn't snatch the result for {0}".format(search_result.name)) + 'Unable to snatch the result for {name}'.format(name=result.name)) if self.success is None: self.success = False @@ -443,7 +510,10 @@ def run(self): class BacklogQueueItem(generic_queue.QueueItem): + """Backlog queue item class.""" + def __init__(self, show, segment): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Backlog', BACKLOG_SEARCH) self.priority = generic_queue.QueuePriorities.LOW self.name = 'BACKLOG-' + str(show.indexerid) @@ -455,37 +525,50 @@ def __init__(self, show, segment): self.segment = segment def run(self): - """ - Run backlog search thread - """ + """Run backlog search thread.""" generic_queue.QueueItem.run(self) self.started = True if not self.show.paused: try: - logger.log(u"Beginning backlog search for: [" + self.show.name + "]") + log.info('Beginning backlog search for: {name}', + {'name': self.show.name}) search_result = search_providers(self.show, self.segment) if search_result: for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (result.name, result.seeders, result.leechers, pretty_file_size(result.size), - result.provider.name)) + log.info( + 'Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', { + 'name': result.name, + 'seeders': result.seeders, + 'leechers': result.leechers, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format - (result.name, pretty_file_size(result.size), result.provider.name)) + log.info( + 'Downloading {name} with size: {size} from {provider}', { + 'name': result.name, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - logger.log(u"No needed episodes found during backlog search for: [" + self.show.name + "]") + log.info('No needed episodes found during backlog search for: {name}', + {'name': self.show.name}) + # TODO: Remove the catch all exception. 
except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + log.debug(traceback.format_exc()) if self.success is None: self.success = False @@ -494,7 +577,10 @@ def run(self): class FailedQueueItem(generic_queue.QueueItem): + """Failed queue item class.""" + def __init__(self, show, segment, down_cur_quality=False): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Retry', FAILED_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH self.name = 'RETRY-' + str(show.indexerid) @@ -507,16 +593,15 @@ def __init__(self, show, segment, down_cur_quality=False): self.down_cur_quality = down_cur_quality def run(self): - """ - Run failed thread - """ + """Run failed thread.""" generic_queue.QueueItem.run(self) self.started = True try: for ep_obj in self.segment: - logger.log(u"Marking episode as bad: [" + ep_obj.pretty_name() + "]") + log.info('Marking episode as bad: {ep}', + {'ep': ep_obj.pretty_name()}) failed_history.mark_failed(ep_obj) @@ -526,32 +611,47 @@ def run(self): history.log_failed(ep_obj, release, provider) failed_history.revert_episode(ep_obj) - logger.log(u"Beginning failed download search for: [" + ep_obj.pretty_name() + "]") + log.info('Beginning failed download search for: {ep}', + {'ep': ep_obj.pretty_name()}) # If it is wanted, self.down_cur_quality doesnt matter - # if it isnt wanted, we need to make sure to not overwrite the existing ep that we reverted to! + # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to! search_result = search_providers(self.show, self.segment, True) if search_result: for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (result.name, result.seeders, result.leechers, pretty_file_size(result.size), - result.provider.name)) + log.info( + 'Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', { + 'name': result.name, + 'seeders': result.seeders, + 'leechers': result.leechers, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format - (result.name, pretty_file_size(result.size), result.provider.name)) + log.info( + 'Downloading {name} with size: {size} from {provider}', { + 'name': result.name, + 'size': pretty_file_size(result.size), + 'provider': result.provider.name, + } + ) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - logger.log(u"No needed episodes found during failed search for: [" + self.show.name + "]") + log.info('No needed episodes found during failed search for: {name}', + {'name': self.show.name}) + # TODO: Replace the catch all exception with a more specific one. 
except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + log.info(traceback.format_exc()) # ## Keep a list with the 100 last executed searches fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) @@ -562,7 +662,8 @@ def run(self): self.finish() -def fifo(myList, item, max_size=100): - if len(myList) >= max_size: - myList.pop(0) - myList.append(item) +def fifo(my_list, item, max_size=100): + """Append item to queue and limit it to 100 items.""" + if len(my_list) >= max_size: + my_list.pop(0) + my_list.append(item) diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index b6d3bc4101..b137dca52f 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -28,37 +28,38 @@ from collections import OrderedDict from datetime import date, datetime -from requests.compat import unquote_plus -from six import iteritems, text_type -from tornado.web import RequestHandler -from .... import ( +from medusa import ( app, classes, db, helpers, image_cache, logger, network_timezones, process_tv, sbdatetime, subtitles, ui, ) -from ....common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_PROPER, \ +from medusa.common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_PROPER, \ UNAIRED, UNKNOWN, WANTED, \ statusStrings -from ....helper.common import ( +from medusa.helper.common import ( dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, timeFormat, try_int, ) -from ....helper.exceptions import CantUpdateShowException, ShowDirectoryNotFoundException, ex -from ....helper.quality import get_quality_string -from ....indexers.indexer_api import indexerApi -from ....indexers.indexer_config import INDEXER_TVDBV2 -from ....indexers.indexer_exceptions import IndexerError, IndexerShowIncomplete, IndexerShowNotFound -from ....logger import filter_logline, read_loglines -from ....media.banner import ShowBanner -from ....media.fan_art import ShowFanArt -from ....media.network_logo import ShowNetworkLogo -from ....media.poster import ShowPoster -from ....search.queue import BacklogQueueItem, ForcedSearchQueueItem -from ....show.coming_episodes import ComingEpisodes -from ....show.history import History -from ....show.show import Show -from ....system.restart import Restart -from ....system.shutdown import Shutdown -from ....version_checker import CheckVersion +from medusa.helper.exceptions import CantUpdateShowException, ShowDirectoryNotFoundException, ex +from medusa.helpers.quality import get_quality_string +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_config import INDEXER_TVDBV2 +from medusa.indexers.indexer_exceptions import IndexerError, IndexerShowIncomplete, IndexerShowNotFound +from medusa.logger import filter_logline, read_loglines +from medusa.media.banner import ShowBanner +from medusa.media.fan_art import ShowFanArt +from medusa.media.network_logo import ShowNetworkLogo +from medusa.media.poster import ShowPoster +from medusa.search.queue import BacklogQueueItem, ForcedSearchQueueItem +from medusa.show.coming_episodes import ComingEpisodes +from medusa.show.history import History +from medusa.show.show import Show +from medusa.system.restart import Restart +from medusa.system.shutdown import Shutdown +from medusa.version_checker import CheckVersion + +from requests.compat import unquote_plus +from six import iteritems, text_type +from tornado.web import RequestHandler indexer_ids = 
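The renamed fifo() helper above keeps only the last max_size items by dropping the oldest entry before appending; it is how FORCED_SEARCH_HISTORY is capped. The sketch below exercises that behaviour and shows the collections.deque equivalent as a point of comparison (the patch itself keeps the plain list).

from collections import deque


def fifo(my_list, item, max_size=100):
    """Append item and cap the list at max_size entries, as in the helper above."""
    if len(my_list) >= max_size:
        my_list.pop(0)
    my_list.append(item)


history = []
for i in range(105):
    fifo(history, i, max_size=100)

assert len(history) == 100
assert history[0] == 5 and history[-1] == 104

# collections.deque(maxlen=...) gives the same semantics out of the box.
assert list(deque(range(105), maxlen=100)) == history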
["indexerid", "tvdbid", "tvmazeid", "tmdbid"] @@ -1275,9 +1276,10 @@ def run(self): if not self.type: self.type = "manual" - data = process_tv.processDir(self.path, process_method=self.process_method, force=self.force_replace, - is_priority=self.is_priority, delete_on=self.delete_files, failed=self.failed, - proc_type=self.type) + data = process_tv.ProcessResult(self.path, process_method=self.process_method).process( + force=self.force_replace, is_priority=self.is_priority, delete_on=self.delete_files, + failed=self.failed, proc_type=self.type + ) if not self.return_data: data = "" @@ -1617,10 +1619,10 @@ def run(self): indexer_api_params['actors'] = False indexer_api_params['custom_ui'] = classes.AllShowsListUI - t = indexerApi(_indexer).indexer(**indexer_api_params) + indexer_api = indexerApi(_indexer).indexer(**indexer_api_params) try: - api_data = t[str(self.name).encode()] + api_data = indexer_api[str(self.name).encode()] except (IndexerShowNotFound, IndexerShowIncomplete, IndexerError): logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING) continue @@ -1642,10 +1644,10 @@ def run(self): indexer_api_params['actors'] = False - t = indexerApi(_indexer).indexer(**indexer_api_params) + indexer_api = indexerApi(_indexer).indexer(**indexer_api_params) try: - my_show = t[int(self.indexerid)] + my_show = indexer_api[int(self.indexerid)] except (IndexerShowNotFound, IndexerShowIncomplete, IndexerError): logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING) return _responds(RESULT_SUCCESS, {"results": [], "langid": lang_id}) diff --git a/medusa/server/api/v2/alias.py b/medusa/server/api/v2/alias.py new file mode 100644 index 0000000000..eac426368f --- /dev/null +++ b/medusa/server/api/v2/alias.py @@ -0,0 +1,173 @@ +# coding=utf-8 +"""Request handler for alias (scene exceptions).""" + +from medusa import db +from medusa.helper.collections import NonEmptyDict +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.tv.series import SeriesIdentifier +from tornado.escape import json_decode + + +class AliasHandler(BaseRequestHandler): + """Alias request handler.""" + + #: resource name + name = 'alias' + #: identifier + identifier = ('identifier', r'\d+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', 'POST', 'PUT', 'DELETE') + + def get(self, identifier, path_param): + """Query scene_exception information.""" + cache_db_con = db.DBConnection('cache.db') + sql_base = (b'SELECT ' + b' exception_id, ' + b' indexer, ' + b' indexer_id, ' + b' show_name, ' + b' season, ' + b' custom ' + b'FROM scene_exceptions ') + sql_where = [] + params = [] + + if identifier is not None: + sql_where.append(b'exception_id') + params += [identifier] + else: + series_slug = self.get_query_argument('series', None) + series_identifier = SeriesIdentifier.from_slug(series_slug) + + if series_slug and not series_identifier: + return self._bad_request('Invalid series') + + season = self._parse(self.get_query_argument('season', None)) + exception_type = self.get_query_argument('type', None) + if exception_type and exception_type not in ('local', ): + return self._bad_request('Invalid type') + + if series_identifier: + sql_where.append(b'indexer') + sql_where.append(b'indexer_id') + params += [series_identifier.indexer.id, series_identifier.id] + + if season is not None: + sql_where.append(b'season') + params += [season] + + if exception_type == 'local': + sql_where.append(b'custom') + 
params += [1] + + if sql_where: + sql_base += b' WHERE ' + b' AND '.join([where + b' = ? ' for where in sql_where]) + + sql_results = cache_db_con.select(sql_base, params) + + data = [] + for item in sql_results: + d = NonEmptyDict() + d['id'] = item[0] + d['series'] = SeriesIdentifier.from_id(item[1], item[2]).slug + d['name'] = item[3] + d['season'] = item[4] if item[4] >= 0 else None + d['type'] = 'local' if item[5] else None + data.append(d) + + if not identifier: + return self._paginate(data, sort='id') + + if not data: + return self._not_found('Alias not found') + + data = data[0] + if path_param: + if path_param not in data: + return self._bad_request('Invalid path parameter') + data = data[path_param] + + return self._ok(data=data) + + def put(self, identifier, **kwargs): + """Update alias information.""" + identifier = self._parse(identifier) + if not identifier: + return self._not_found('Invalid alias id') + + data = json_decode(self.request.body) + + if not data or not all([data.get('id'), data.get('series'), data.get('name'), + data.get('type')]) or data['id'] != identifier: + return self._bad_request('Invalid request body') + + series_identifier = SeriesIdentifier.from_slug(data.get('series')) + if not series_identifier: + return self._bad_request('Invalid series') + + cache_db_con = db.DBConnection('cache.db') + last_changes = cache_db_con.connection.total_changes + cache_db_con.action(b'UPDATE scene_exceptions' + b' set indexer = ?' + b', indexer_id = ?' + b', show_name = ?' + b', season = ?' + b', custom = 1' + b' WHERE exception_id = ?', + [series_identifier.indexer.id, + series_identifier.id, + data['name'], + data.get('season'), + identifier]) + + if cache_db_con.connection.total_changes - last_changes != 1: + return self._not_found('Alias not found') + + return self._no_content() + + def post(self, identifier, **kwargs): + """Add an alias.""" + if identifier is not None: + return self._bad_request('Alias id should not be specified') + + data = json_decode(self.request.body) + + if not data or not all([data.get('series'), data.get('name'), + data.get('type')]) or 'id' in data or data['type'] != 'local': + return self._bad_request('Invalid request body') + + series_identifier = SeriesIdentifier.from_slug(data.get('series')) + if not series_identifier: + return self._bad_request('Invalid series') + + cache_db_con = db.DBConnection('cache.db') + last_changes = cache_db_con.connection.total_changes + cursor = cache_db_con.action(b'INSERT INTO scene_exceptions' + b' (indexer, indexer_id, show_name, season, custom) ' + b' values (?,?,?,?,1)', + [series_identifier.indexer.id, + series_identifier.id, + data['name'], + data.get('season')]) + + if cache_db_con.connection.total_changes - last_changes <= 0: + return self._conflict('Unable to create alias') + + data['id'] = cursor.lastrowid + return self._created(data=data, identifier=data['id']) + + def delete(self, identifier, **kwargs): + """Delete an alias.""" + identifier = self._parse(identifier) + if not identifier: + return self._bad_request('Invalid alias id') + + cache_db_con = db.DBConnection('cache.db') + last_changes = cache_db_con.connection.total_changes + cache_db_con.action(b'DELETE FROM scene_exceptions WHERE exception_id = ?', [identifier]) + if cache_db_con.connection.total_changes - last_changes <= 0: + return self._not_found('Alias not found') + + return self._no_content() diff --git a/medusa/server/api/v2/alias_source.py b/medusa/server/api/v2/alias_source.py new file mode 100644 index 0000000000..12f89213af 
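The alias PUT, POST and DELETE handlers above decide between success and 'Alias not found' by snapshotting sqlite's total_changes counter before the statement and comparing it afterwards. A stand-alone demonstration of that technique against an in-memory database:

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, show_name TEXT)')
con.execute("INSERT INTO scene_exceptions (show_name) VALUES ('Example Show')")

last_changes = con.total_changes
con.execute("UPDATE scene_exceptions SET show_name = 'Renamed' WHERE exception_id = 1")
assert con.total_changes - last_changes == 1   # a row was found and updated

last_changes = con.total_changes
con.execute('DELETE FROM scene_exceptions WHERE exception_id = 999')
assert con.total_changes - last_changes == 0   # nothing matched, the handler answers 404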
--- /dev/null +++ b/medusa/server/api/v2/alias_source.py @@ -0,0 +1,92 @@ +# coding=utf-8 +"""Request handler for alias source.""" +from datetime import datetime + +from medusa.scene_exceptions import get_last_refresh, retrieve_exceptions +from medusa.server.api.v2.base import BaseRequestHandler +from tornado.escape import json_decode + + +def find_alias_sources(predicate=None): + """Query the cache table for the last update for every scene exception source.""" + data = [] + mapping = {'local': 'custom_exceptions'} + for identifier in ('local', 'xem', 'anidb'): + if not predicate or predicate(identifier): + last_refresh = get_last_refresh(mapping.get(identifier, identifier))[0]['last_refreshed'] + data.append({'id': identifier, 'lastRefresh': last_refresh}) + + return data + + +class AliasSourceHandler(BaseRequestHandler): + """Alias source request handler.""" + + #: resource name + name = 'alias-source' + #: identifier + identifier = ('identifier', r'\w+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, identifier, path_param=None): + """Query alias source information. + + :param identifier: source name + :param path_param: + """ + if not identifier: + data = find_alias_sources() + return self._paginate(data, sort='id') + + data = find_alias_sources(predicate=lambda v: v == identifier) + if not data: + return self._not_found('Alias source not found.') + + data = data[0] + if path_param: + if path_param not in data: + return self._bad_request('Invalid path parameter') + data = data[path_param] + + return self._ok(data=data) + + +class AliasSourceOperationHandler(BaseRequestHandler): + """Alias source operation request handler.""" + + #: parent resource handler + parent_handler = AliasSourceHandler + #: resource name + name = 'operation' + #: identifier + identifier = None + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = ('POST', ) + + def post(self, identifier): + """Refresh all scene exception types.""" + types = { + 'local': 'custom_exceptions', + 'xem': 'xem', + 'anidb': 'anidb', + 'all': None, + } + + if identifier not in types: + return self._not_found('Alias source not found') + + data = json_decode(self.request.body) + if not data or not all([data.get('type')]) and len(data) != 1: + return self._bad_request('Invalid request body') + + if data['type'] == 'REFRESH': + retrieve_exceptions(force=True, exception_type=types[identifier]) + data['creation'] = datetime.utcnow().isoformat()[:-3] + 'Z' + return self._created(data=data) + + return self._bad_request('Operation not supported') diff --git a/medusa/server/api/v2/asset.py b/medusa/server/api/v2/asset.py deleted file mode 100644 index e1b82ea1be..0000000000 --- a/medusa/server/api/v2/asset.py +++ /dev/null @@ -1,36 +0,0 @@ -# coding=utf-8 -"""Request handler for assets.""" - -from .base import BaseRequestHandler -from ....media.banner import ShowBanner -from ....media.fan_art import ShowFanArt -from ....media.network_logo import ShowNetworkLogo -from ....media.poster import ShowPoster - - -class AssetHandler(BaseRequestHandler): - """Asset request handler.""" - - def get(self, asset_group=None, query=None, *args, **kwargs): - """Get an asset.""" - if asset_group == 'show': - # http://localhost:8081/api/v2/asset/show/295519?api_key=xxx&type=banner - asset_type = self.get_argument('type', default='banner') - show_id = query - media = None - media_format = ('normal', 'thumb')[asset_type in ('bannerThumb', 'posterThumb', 
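Given the routing attributes above (parent resource alias-source with a \w+ identifier, child resource operation), the refresh endpoint should be reachable roughly as shown below. The host, port and /api/v2 prefix are assumptions taken from the example URL in the removed asset handler, and the API key header is one of the authentication methods accepted by the v2 base handler; adjust both to the actual installation.

import requests

# Hypothetical client call; 'xem' may also be 'local', 'anidb' or 'all'.
response = requests.post(
    'http://localhost:8081/api/v2/alias-source/xem/operation',
    json={'type': 'REFRESH'},
    headers={'X-Api-Key': 'your-medusa-api-key'},
)
# A successful refresh answers 201 and echoes the body plus a 'creation' timestamp.
print(response.status_code, response.json())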
'small')] - - if asset_type.lower().startswith('banner'): - media = ShowBanner(show_id, media_format) - elif asset_type.lower().startswith('fanart'): - media = ShowFanArt(show_id, media_format) - elif asset_type.lower().startswith('poster'): - media = ShowPoster(show_id, media_format) - elif asset_type.lower().startswith('network'): - media = ShowNetworkLogo(show_id, media_format) - - if media is not None: - self.set_header('Content-Type', media.get_media_type()) - self.api_finish(stream=media.get_media()) - else: - self.api_finish(status=404, error='Asset or Asset Type Does Not Exist') diff --git a/medusa/server/api/v2/auth.py b/medusa/server/api/v2/auth.py index 77c1feb7e3..f7ec6bef49 100644 --- a/medusa/server/api/v2/auth.py +++ b/medusa/server/api/v2/auth.py @@ -1,24 +1,27 @@ # coding=utf-8 """Request handler for authentication.""" +import logging import random import string import time + import jwt -import tornado +from medusa import app, helpers, notifiers +from medusa.logger.adapters.style import BraceAdapter +from medusa.server.api.v2.base import BaseRequestHandler +from tornado.escape import json_decode -from .base import BaseRequestHandler -from .... import app, helpers, logger, notifiers +log = BraceAdapter(logging.getLogger(__name__)) class AuthHandler(BaseRequestHandler): """Auth request handler.""" - def set_default_headers(self): - """Set default CORS headers.""" - super(AuthHandler, self).set_default_headers() - self.set_header('X-Medusa-Server', app.APP_VERSION) - self.set_header('Access-Control-Allow-Methods', 'POST, OPTIONS') + #: resource name + name = 'authenticate' + #: allowed HTTP methods + allowed_methods = ('POST', ) def prepare(self): """Prepare.""" @@ -28,54 +31,47 @@ def post(self, *args, **kwargs): """Request JWT.""" username = app.WEB_USERNAME password = app.WEB_PASSWORD - submitted_username = '' - submitted_password = '' - submitted_exp = 86400 # 1 day - request_body = {} # If the user hasn't set a username and/or password just let them login - if username.strip() != '' and password.strip() != '': - if self.request.body: - if self.request.headers['content-type'] == 'application/json': - request_body = tornado.escape.json_decode(self.request.body) - else: - self._failed_login(error='Incorrect content-type') - if all(x in request_body for x in ['username', 'password']): - submitted_username = request_body['username'] - submitted_password = request_body['password'] - if 'exp' in request_body: - submitted_exp = request_body['exp'] - else: - self._failed_login(error='No Credentials Provided') - - if username != submitted_username or password != submitted_password: - self._failed_login(error='Invalid credentials') - else: - self._login(submitted_exp) - else: - self._login() + if not username.strip() or not password.strip(): + return self._login() + + if not self.request.body: + return self._failed_login(error='No Credentials Provided') + + if self.request.headers['content-type'] != 'application/json': + return self._failed_login(error='Incorrect content-type') + + request_body = json_decode(self.request.body) + submitted_username = request_body.get('username') + submitted_password = request_body.get('password') + submitted_exp = request_body.get('exp', 86400) + if username != submitted_username or password != submitted_password: + return self._failed_login(error='Invalid credentials') + + self._login(submitted_exp) def _login(self, exp=86400): self.set_header('Content-Type', 'application/jwt') if app.NOTIFY_ON_LOGIN and not 
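The rewritten AuthHandler above is registered as the authenticate resource and only accepts POST: the body must be application/json with username and password, and may carry exp (token lifetime in seconds, defaulting to one day). A hypothetical login call, with the host, port and /api/v2 prefix assumed as in the other examples and placeholder credentials:

import requests

response = requests.post(
    'http://localhost:8081/api/v2/authenticate',
    json={'username': 'admin', 'password': 'secret', 'exp': 3600},
)
# On success the response body carries the signed JWT issued by _login().
print(response.status_code, response.text)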
helpers.is_ip_private(self.request.remote_ip): notifiers.notify_login(self.request.remote_ip) - logger.log('{user} logged into the API v2'.format(user=app.WEB_USERNAME), logger.INFO) + log.info('{user} logged into the API v2', {'user': app.WEB_USERNAME}) time_now = int(time.time()) - self.api_finish(data=jwt.encode({ + self._ok(data=jwt.encode({ 'iss': 'Medusa ' + app.APP_VERSION, 'iat': time_now, # @TODO: The jti should be saved so we can revoke tokens 'jti': ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(20)), 'exp': time_now + int(exp), - 'scopes': ['show:read', 'show:write'], # @TODO: This should be reaplce with scopes or roles/groups + 'scopes': ['show:read', 'show:write'], # @TODO: This should be replaced with scopes or roles/groups 'username': app.WEB_USERNAME, 'apiKey': app.API_KEY # TODO: This should be replaced with the JWT itself }, app.ENCRYPTION_SECRET, algorithm='HS256')) def _failed_login(self, error=None): - self.api_finish(status=401, error=error) - logger.log('{user} attempted a failed login to the API v2 from IP: {ip}'.format( - user=app.WEB_USERNAME, - ip=self.request.remote_ip - ), logger.WARNING) + self._unauthorized(error=error) + log.warning('{user} attempted a failed login to the API v2 from IP: {ip}', { + 'user': app.WEB_USERNAME, + 'ip': self.request.remote_ip + }) diff --git a/medusa/server/api/v2/base.py b/medusa/server/api/v2/base.py index 74643c8c7e..cf0d155805 100644 --- a/medusa/server/api/v2/base.py +++ b/medusa/server/api/v2/base.py @@ -2,125 +2,283 @@ """Base module for request handlers.""" import base64 +import collections import json import operator import traceback -from datetime import datetime +from datetime import date, datetime from babelfish.language import Language import jwt -from six import text_type +from medusa import app +from six import string_types, text_type +from tornado.httpclient import HTTPError from tornado.web import RequestHandler -from .... 
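The token issued above is an HS256 JWT signed with app.ENCRYPTION_SECRET and carrying iss, iat, jti, exp, scopes, username and apiKey claims; the base handler later verifies it with the same secret and algorithm. A small PyJWT round trip, with the secret and claims as placeholders:

import jwt

secret = 'app-encryption-secret'   # stands in for app.ENCRYPTION_SECRET
token = jwt.encode({'iss': 'Medusa 0.1.0',
                    'username': 'admin',
                    'scopes': ['show:read', 'show:write']},
                   secret, algorithm='HS256')

claims = jwt.decode(token, secret, algorithms=['HS256'])
assert claims['username'] == 'admin'
assert 'show:read' in claims['scopes']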
import app - class BaseRequestHandler(RequestHandler): """A base class used for shared RequestHandler methods.""" + DEFAULT_ALLOWED_METHODS = ('OPTIONS', ) + + #: resource name + name = None + #: identifier + identifier = None + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = None + #: parent resource handler + parent_handler = None + def prepare(self): """Check if JWT or API key is provided and valid.""" - if self.request.method != 'OPTIONS': - token = '' - api_key = '' - if self.request.headers.get('Authorization'): - if self.request.headers.get('Authorization').startswith('Bearer'): - try: - token = jwt.decode(self.request.headers.get('Authorization').replace('Bearer ', ''), app.ENCRYPTION_SECRET, algorithms=['HS256']) - except jwt.ExpiredSignatureError: - self.api_finish(status=401, error='Token has expired.') - except jwt.DecodeError: - self.api_finish(status=401, error='Invalid token.') - if self.request.headers.get('Authorization').startswith('Basic'): - auth_decoded = base64.decodestring(self.request.headers.get('Authorization')[6:]) - username, password = auth_decoded.split(':', 2) - if username != app.WEB_USERNAME or password != app.WEB_PASSWORD: - self.api_finish(status=401, error='Invalid user/pass.') - - if self.get_argument('api_key', default='') and self.get_argument('api_key', default='') == app.API_KEY: - api_key = self.get_argument('api_key', default='') - if self.request.headers.get('X-Api-Key') and self.request.headers.get('X-Api-Key') == app.API_KEY: - api_key = self.request.headers.get('X-Api-Key') - if token == '' and api_key == '': - self.api_finish(status=401, error='Invalid token or API key.') + if self.request.method == 'OPTIONS': + return + + api_key = self.get_argument('api_key', default=None) or self.request.headers.get('X-Api-Key') + if api_key and api_key == app.API_KEY: + return + + authorization = self.request.headers.get('Authorization') + if not authorization: + return self._unauthorized('No authorization token.') + + if authorization.startswith('Bearer'): + try: + token = authorization.replace('Bearer ', '') + jwt.decode(token, app.ENCRYPTION_SECRET, algorithms=['HS256']) + except jwt.ExpiredSignatureError: + return self._unauthorized('Token has expired.') + except jwt.DecodeError: + return self._unauthorized('Invalid token.') + elif authorization.startswith('Basic'): + auth_decoded = base64.decodestring(authorization[6:]) + username, password = auth_decoded.split(':', 2) + if username != app.WEB_USERNAME or password != app.WEB_PASSWORD: + return self._unauthorized('Invalid user/pass.') + else: + return self._unauthorized('Invalid token.') def write_error(self, *args, **kwargs): """Only send traceback if app.DEVELOPER is true.""" if app.DEVELOPER and 'exc_info' in kwargs: self.set_header('content-type', 'text/plain') + self.set_status(500) for line in traceback.format_exception(*kwargs["exc_info"]): self.write(line) self.finish() else: - self.api_finish(status=500, error='Internal Server Error') + self._internal_server_error() def options(self, *args, **kwargs): """Options.""" - self.set_status(204) - self.finish() + self._no_content() def set_default_headers(self): """Set default CORS headers.""" + if app.APP_VERSION: + self.set_header('X-Medusa-Server', app.APP_VERSION) self.set_header('Access-Control-Allow-Origin', '*') self.set_header('Access-Control-Allow-Headers', 'Origin, Accept, Authorization, Content-Type,' 'X-Requested-With, X-CSRF-Token, X-Api-Key, X-Medusa-Server') - 
self.set_header('Access-Control-Allow-Methods', 'GET, OPTIONS') + self.set_header('Access-Control-Allow-Methods', ', '.join(self.DEFAULT_ALLOWED_METHODS + self.allowed_methods)) - def api_finish(self, status=None, error=None, data=None, headers=None, stream=None, **kwargs): + def api_finish(self, status=None, error=None, data=None, headers=None, stream=None, content_type=None, **kwargs): """End the api request writing error or data to http response.""" + content_type = content_type or 'application/json; charset=UTF-8' if headers is not None: for header in headers: self.set_header(header, headers[header]) if error is not None and status is not None: - self.set_header('content-type', 'application/json') self.set_status(status) + self.set_header('content-type', content_type) self.finish({ 'error': error }) else: self.set_status(status or 200) if data is not None: - self.set_header('content-type', 'application/json') - self.finish(json.JSONEncoder(default=json_string_encoder).encode(data)) + self.set_header('content-type', content_type) + self.finish(json.JSONEncoder(default=json_default_encoder).encode(data)) elif stream: # This is mainly for assets + self.set_header('content-type', content_type) self.finish(stream) - elif kwargs: + elif kwargs and 'chunk' in kwargs: + self.set_header('content-type', content_type) self.finish(kwargs) + @classmethod + def _create_base_url(cls, prefix_url, resource_name, *args): + elements = [prefix_url, resource_name] + \ + [r'(?P<{key}>{value})'.format(key=key, value=value) for (key, value) in args] + return '/'.join(elements) + + @classmethod + def create_url(cls, prefix_url, resource_name, *args): + """Create url base on resource name and path params.""" + resource_url = prefix_url + '/' + resource_name + path_params = '' + + for arg in args: + if not arg: + continue + + key, value = arg + q = r'(?:/(?P<{key}>{value}))'.format(key=key, value=value) + if path_params: + path_params = r'(?:{previous}(?:{current}|/?))'.format(previous=path_params, current=q) + else: + path_params = q + + path_params = r'(?:{path}|/?)'.format(path=path_params) + + return resource_url + path_params + '/?$' + + @classmethod + def create_app_handler(cls, base): + """Create app handler tuple: regex, class.""" + if cls.parent_handler: + base = cls._create_base_url(base, cls.parent_handler.name, cls.parent_handler.identifier) + + return cls.create_url(base, cls.name, *(cls.identifier, cls.path_param)), cls + + def _handle_request_exception(self, e): + if isinstance(e, HTTPError): + self.api_finish(e.code, e.message) + else: + super(BaseRequestHandler, self)._handle_request_exception(e) + + def _ok(self, data=None, headers=None, stream=None, content_type=None): + self.api_finish(200, data=data, headers=headers, stream=stream, content_type=content_type) + + def _created(self, data=None, identifier=None): + if identifier is not None: + location = self.request.path + if not location.endswith('/'): + location += '/' + + self.set_header('Location', '{0}{1}'.format(location, identifier)) + self.api_finish(201, data=data) + + def _accepted(self): + self.api_finish(202) + + def _no_content(self): + self.api_finish(204) + + def _bad_request(self, error): + self.api_finish(400, error=error) + + def _unauthorized(self, error): + self.api_finish(401, error=error) + + def _not_found(self, error='Resource not found'): + self.api_finish(404, error=error) + + def _method_not_allowed(self, error): + self.api_finish(405, error=error) + + def _conflict(self, error): + self.api_finish(409, error=error) + 
+ def _internal_server_error(self, error='Internal Server Error'): + self.api_finish(500, error=error) + + def _not_implemented(self): + self.api_finish(501) + + @classmethod + def _raise_bad_request_error(cls, error): + raise HTTPError(400, error) + def _get_sort(self, default): - return self.get_argument('sort', default=default) + values = self.get_argument('sort', default=default) + if values: + results = [] + for value in values.split(','): + reverse = value.startswith('-') + if reverse or value.startswith('+'): + value = value[1:] + + results.append((value, reverse)) - def _get_sort_order(self, default='asc'): - return self.get_argument('sort_order', default=default).lower() + return results def _get_page(self): - return max(1, int(self.get_argument('page', default=1))) + try: + page = int(self.get_argument('page', default=1)) + if page < 1: + self._raise_bad_request_error('Invalid page parameter') + + return page + except ValueError: + self._raise_bad_request_error('Invalid page parameter') def _get_limit(self, default=20, maximum=1000): - return min(max(1, int(self.get_argument('limit', default=default))), maximum) + try: + limit = self._parse(self.get_argument('limit', default=default)) + if limit < 1 or limit > maximum: + self._raise_bad_request_error('Invalid limit parameter') - def _paginate(self, data, sort_property): - arg_sort = self._get_sort(default=sort_property) - arg_sort_order = self._get_sort_order() + return limit + except ValueError: + self._raise_bad_request_error('Invalid limit parameter') + + def _paginate(self, data=None, data_generator=None, sort=None): arg_page = self._get_page() arg_limit = self._get_limit() - results = sorted(data, key=operator.itemgetter(arg_sort), reverse=arg_sort_order == 'desc') - count = len(results) - start = (arg_page - 1) * arg_limit - end = start + arg_limit - results = results[start:end] headers = { - 'X-Pagination-Count': count, 'X-Pagination-Page': arg_page, 'X-Pagination-Limit': arg_limit } - return self.api_finish(data=results, headers=headers) + first_page = arg_page if arg_page > 0 else 1 + previous_page = None if arg_page <= 1 else arg_page - 1 + if data_generator: + results = list(data_generator())[:arg_limit] + next_page = None if len(results) < arg_limit else arg_page + 1 + last_page = None + else: + arg_sort = self._get_sort(default=sort) + start = (arg_page - 1) * arg_limit + end = start + arg_limit + results = data + if arg_sort: + try: + for field, reverse in reversed(arg_sort): + results = sorted(results, key=operator.itemgetter(field), reverse=reverse) + except KeyError: + return self._bad_request('Invalid sort query parameter') + + count = len(results) + headers['X-Pagination-Count'] = count + results = results[start:end] + next_page = None if end > count else arg_page + 1 + last_page = ((count - 1) / arg_limit) + 1 + if last_page <= arg_page: + last_page = None + + links = [] + for rel, page in (('next', next_page), ('last', last_page), + ('first', first_page), ('previous', previous_page)): + if page is None: + continue + + delimiter = '&' if self.request.query_arguments else '?' + link = '<{uri}{delimiter}page={page}&limit={limit}>; rel="{rel}"'.format( + uri=self.request.uri, delimiter=delimiter, page=page, limit=arg_limit, rel=rel) + links.append(link) + + self.set_header('Link', ', '.join(links)) - @staticmethod - def _parse(value, function=int): + return self._ok(data=results, headers=headers) + + @classmethod + def _parse(cls, value, function=int): """Parse value using the specified function. 
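The reworked pagination above also changes the sort parameter: it now accepts a comma-separated list of fields where a leading '-' means descending and an optional '+' means ascending, and _paginate() applies the fields as successive stable sorts in reverse order. A stand-alone sketch of that parsing and sorting (parse_sort is a local stand-in for _get_sort):

import operator


def parse_sort(values):
    """Parse '?sort=-season,name' style values into (field, reverse) pairs."""
    results = []
    for value in values.split(','):
        reverse = value.startswith('-')
        if reverse or value.startswith('+'):
            value = value[1:]
        results.append((value, reverse))
    return results


data = [{'season': 1, 'name': 'b'}, {'season': 2, 'name': 'a'}, {'season': 1, 'name': 'a'}]
for field, reverse in reversed(parse_sort('-season,name')):
    data = sorted(data, key=operator.itemgetter(field), reverse=reverse)

assert data == [{'season': 2, 'name': 'a'},
                {'season': 1, 'name': 'a'},
                {'season': 1, 'name': 'b'}]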
:param value: @@ -129,17 +287,32 @@ def _parse(value, function=int): :return: """ if value is not None: - return function(value) + try: + return function(value) + except ValueError: + cls._raise_bad_request_error('Invalid value {value!r}'.format(value=value)) + + @classmethod + def _parse_boolean(cls, value): + """Parse value using the specified function. + + :param value: + :return: + """ + if isinstance(value, text_type): + return value.lower() == 'true' + + return cls._parse(value, bool) - @staticmethod - def _parse_date(value, fmt='%Y-%m-%d'): + @classmethod + def _parse_date(cls, value, fmt='%Y-%m-%d'): """Parse a date value using the specified format. :param value: :param fmt: :return: """ - return BaseRequestHandler._parse(value, lambda d: datetime.strptime(d, fmt)) + return cls._parse(value, lambda d: datetime.strptime(d, fmt)) class NotFoundHandler(BaseRequestHandler): @@ -149,10 +322,113 @@ def get(self, *args, **kwargs): """Get.""" self.api_finish(status=404) + @classmethod + def create_app_handler(cls, base): + """Capture everything.""" + return r'{base}(/?.*)'.format(base=base), cls -def json_string_encoder(o): + +def json_default_encoder(o): """Convert properties to string.""" if isinstance(o, Language): return getattr(o, 'name') + if isinstance(o, set): + return list(o) + + if isinstance(o, date): + return o.isoformat() + return text_type(o) + + +def iter_nested_items(data, prefix=''): + """Iterate through the dictionary. + + Nested keys are separated with dots. + """ + for key, value in data.items(): + p = prefix + key + if isinstance(value, collections.Mapping): + for inner_key, inner_value in iter_nested_items(value, prefix=p + '.'): + yield inner_key, inner_value + else: + yield p, value + + +def set_nested_value(data, key, value): + """Set nested value to the dictionary.""" + keys = key.split('.') + for k in keys[:-1]: + data = data.setdefault(k, {}) + + data[keys[-1]] = value + + +class PatchField(object): + """Represent a field to be patched.""" + + def __init__(self, target_type, attr, attr_type, + validator=None, converter=None, default_value=None, post_processor=None): + """Constructor.""" + if not hasattr(target_type, attr): + raise ValueError('{0!r} has no attribute {1}'.format(target_type, attr)) + + self.target_type = target_type + self.attr = attr + self.attr_type = attr_type + self.validator = validator or (lambda v: isinstance(v, self.attr_type)) + self.converter = converter or (lambda v: v) + self.default_value = default_value + self.post_processor = post_processor + + def patch(self, target, value): + """Patch the field with the specified value.""" + valid = self.validator(value) + + if not valid and self.default_value is not None: + value = self.default_value + valid = True + + if valid: + setattr(target, self.attr, self.converter(value)) + if self.post_processor: + self.post_processor(value) + return True + + +class StringField(PatchField): + """Patch string fields.""" + + def __init__(self, target_type, attr, validator=None, converter=None, default_value=None, post_processor=None): + """Constructor.""" + super(StringField, self).__init__(target_type, attr, string_types, validator=validator, converter=converter, + default_value=default_value, post_processor=post_processor) + + +class IntegerField(PatchField): + """Patch integer fields.""" + + def __init__(self, target_type, attr, validator=None, converter=None, default_value=None, post_processor=None): + """Constructor.""" + super(IntegerField, self).__init__(target_type, attr, int, validator=validator, 
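The two helpers above are what the PATCH handlers use to walk a nested JSON body: iter_nested_items() flattens it into dotted keys, and set_nested_value() folds accepted keys back into a nested response. A simplified round trip (dict is used in place of collections.Mapping to keep the sketch self-contained):

def iter_nested_items(data, prefix=''):
    """Yield ('a.b.c', value) pairs for a nested dict, as the helper above does."""
    for key, value in data.items():
        path = prefix + key
        if isinstance(value, dict):
            for inner in iter_nested_items(value, prefix=path + '.'):
                yield inner
        else:
            yield path, value


def set_nested_value(data, key, value):
    """Set a dotted key back into a nested dict."""
    keys = key.split('.')
    for k in keys[:-1]:
        data = data.setdefault(k, {})
    data[keys[-1]] = value


body = {'torrents': {'enabled': True, 'label': 'medusa'}, 'theme': {'name': 'dark'}}
flat = dict(iter_nested_items(body))
assert flat == {'torrents.enabled': True, 'torrents.label': 'medusa', 'theme.name': 'dark'}

accepted = {}
for key, value in flat.items():
    set_nested_value(accepted, key, value)
assert accepted == body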
converter=converter, + default_value=default_value, post_processor=post_processor) + + +class BooleanField(PatchField): + """Patch boolean fields.""" + + def __init__(self, target_type, attr, validator=None, converter=int, default_value=None, post_processor=None): + """Constructor.""" + super(BooleanField, self).__init__(target_type, attr, bool, validator=validator, converter=converter, + default_value=default_value, post_processor=post_processor) + + +class EnumField(PatchField): + """Patch enumeration fields.""" + + def __init__(self, target_type, attr, enums, attr_type=text_type, + converter=None, default_value=None, post_processor=None): + """Constructor.""" + super(EnumField, self).__init__(target_type, attr, attr_type, validator=lambda v: v in enums, + converter=converter, default_value=default_value, post_processor=post_processor) diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 4348ed58c2..1ea9f3131f 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -1,277 +1,213 @@ # coding=utf-8 """Request handler for configuration.""" - +import logging import platform import sys + +from medusa import ( + app, + db, +) +from medusa.helper.collections import NonEmptyDict +from medusa.server.api.v2.base import ( + BaseRequestHandler, + BooleanField, + EnumField, + IntegerField, + StringField, + iter_nested_items, + set_nested_value, +) from six import text_type from tornado.escape import json_decode -from .base import BaseRequestHandler -from .... import app, db, logger +log = logging.getLogger(__name__) + + +def layout_schedule_post_processor(v): + """Calendar layout should sort by date.""" + if v == 'calendar': + app.COMING_EPS_SORT = 'date' class ConfigHandler(BaseRequestHandler): """Config request handler.""" - def set_default_headers(self): - """Set default CORS headers.""" - super(ConfigHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'GET, PATCH, OPTIONS') + #: resource name + name = 'config' + #: identifier + identifier = ('identifier', r'\w+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', 'PATCH', ) + #: patch mapping + patches = { + 'anonRedirect': StringField(app, 'ANON_REDIRECT'), + 'emby.enabled': BooleanField(app, 'USE_EMBY'), + 'torrents.enabled': BooleanField(app, 'USE_TORRENTS'), + 'torrents.username': StringField(app, 'TORRENT_USERNAME'), + 'torrents.password': StringField(app, 'TORRENT_PASSWORD'), + 'torrents.label': StringField(app, 'TORRENT_LABEL'), + 'torrents.labelAnime': StringField(app, 'TORRENT_LABEL_ANIME'), + 'torrents.verifySSL': BooleanField(app, 'TORRENT_VERIFY_CERT'), + 'torrents.path': BooleanField(app, 'TORRENT_PATH'), + 'selectedRootIndex': IntegerField(app, 'SELECTED_ROOT'), + 'layout.schedule': EnumField(app, 'COMING_EPS_LAYOUT', ('poster', 'banner', 'list', 'calendar'), + default_value='banner', post_processor=layout_schedule_post_processor), + 'layout.history': EnumField(app, 'HISTORY_LAYOUT', ('compact', 'detailed'), default_value='detailed'), + 'layout.home': EnumField(app, 'HOME_LAYOUT', ('poster', 'small', 'banner', 'simple', 'coverflow'), + default_value='poster'), + 'layout.show.allSeasons': BooleanField(app, 'DISPLAY_ALL_SEASONS'), + 'layout.show.specials': BooleanField(app, 'DISPLAY_SHOW_SPECIALS'), + 'theme.name': StringField(app, 'THEME_NAME'), + 'backlogOverview.period': StringField(app, 'BACKLOG_PERIOD'), + 'backlogOverview.status': StringField(app, 'BACKLOG_STATUS'), + } - def 
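The patches mapping above is the heart of the new config PATCH flow: each dotted key is bound to a PatchField that validates the incoming value and writes the converted value onto app. The sketch below restates the mechanism in simplified form, with a stand-in FakeApp instead of medusa.app and plain str instead of six.string_types:

class FakeApp(object):
    USE_EMBY = 0
    TORRENT_LABEL = ''


class PatchField(object):
    """Simplified restatement of the PatchField mechanism above."""

    def __init__(self, target_type, attr, attr_type, validator=None, converter=None):
        if not hasattr(target_type, attr):
            raise ValueError('{0!r} has no attribute {1}'.format(target_type, attr))
        self.attr = attr
        self.validator = validator or (lambda v: isinstance(v, attr_type))
        self.converter = converter or (lambda v: v)

    def patch(self, target, value):
        if self.validator(value):
            setattr(target, self.attr, self.converter(value))
            return True


patches = {
    'emby.enabled': PatchField(FakeApp, 'USE_EMBY', bool, converter=int),  # like BooleanField
    'torrents.label': PatchField(FakeApp, 'TORRENT_LABEL', str),           # like StringField
}

app = FakeApp()
assert patches['emby.enabled'].patch(app, True) is True
assert app.USE_EMBY == 1                                   # booleans persisted as ints
assert patches['torrents.label'].patch(app, 42) is None    # wrong type, value ignored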
get(self, query=''): + def get(self, identifier, path_param=None): """Query general configuration. - :param query: - :type query: str + :param identifier: + :param path_param: + :type path_param: str """ - config_data = { - 'anonRedirect': app.ANON_REDIRECT, - 'animeSplitHome': app.ANIME_SPLIT_HOME, - 'comingEpsSort': app.COMING_EPS_SORT, - 'datePreset': app.DATE_PRESET, - 'fuzzyDating': app.FUZZY_DATING, - 'themeName': app.THEME_NAME, - 'posterSortby': app.POSTER_SORTBY, - 'posterSortdir': app.POSTER_SORTDIR, - 'rootDirs': app.ROOT_DIRS, - 'sortArticle': app.SORT_ARTICLE, - 'timePreset': app.TIME_PRESET, - 'trimZero': app.TRIM_ZERO, - 'fanartBackground': app.FANART_BACKGROUND, - 'fanartBackgroundOpacity': 0 if app.FANART_BACKGROUND_OPACITY is None else float(app.FANART_BACKGROUND_OPACITY), - 'branch': app.BRANCH, - 'commitHash': app.CUR_COMMIT_HASH, - 'release': app.APP_VERSION, - 'sslVersion': app.OPENSSL_VERSION, - 'pythonVersion': sys.version, - 'databaseVersion': { - 'major': app.MAJOR_DB_VERSION, - 'minor': app.MINOR_DB_VERSION - }, - 'os': platform.platform(), - 'locale': '.'.join([text_type(loc or 'Unknown') for loc in app.LOCALE]), - 'localUser': app.OS_USER or 'Unknown', - 'programDir': app.PROG_DIR, - 'configFile': app.CONFIG_FILE, - 'dbFilename': db.dbFilename(), - 'cacheDir': app.CACHE_DIR, - 'logDir': app.LOG_DIR, - 'appArgs': app.MY_ARGS, - 'webRoot': app.WEB_ROOT, - 'githubUrl': app.GITHUB_IO_URL, - 'wikiUrl': app.WIKI_URL, - 'sourceUrl': app.APPLICATION_URL, - 'downloadUrl': app.DOWNLOAD_URL, - 'subtitlesMulti': app.SUBTITLES_MULTI, - 'namingForceFolders': app.NAMING_FORCE_FOLDERS, - 'subtitles': { - 'enabled': bool(app.USE_SUBTITLES) - }, - 'kodi': { - 'enabled': bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY) - }, - 'plex': { - 'server': { - 'enabled': bool(app.USE_PLEX_SERVER), - 'notify': { - 'snatch': bool(app.PLEX_NOTIFY_ONSNATCH), - 'download': bool(app.PLEX_NOTIFY_ONDOWNLOAD), - 'subtitleDownload': bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) - }, - 'updateLibrary': bool(app.PLEX_UPDATE_LIBRARY), - 'host': app.PLEX_SERVER_HOST, - 'token': app.PLEX_SERVER_TOKEN, - 'username': app.PLEX_SERVER_USERNAME, - 'password': app.PLEX_SERVER_PASSWORD - }, - 'client': { - 'enabled': bool(app.USE_PLEX_CLIENT), - 'username': app.PLEX_CLIENT_USERNAME, - 'password': app.PLEX_CLIENT_PASSWORD, - 'host': app.PLEX_CLIENT_HOST - } - }, - 'emby': { - 'enabled': bool(app.USE_EMBY) - }, - 'torrents': { - 'enabled': bool(app.USE_TORRENTS), - 'method': app.TORRENT_METHOD, - 'username': app.TORRENT_USERNAME, - 'password': app.TORRENT_PASSWORD, - 'label': app.TORRENT_LABEL, - 'labelAnime': app.TORRENT_LABEL_ANIME, - 'verifySSL': app.TORRENT_VERIFY_CERT, - 'path': app.TORRENT_PATH, - 'seedTime': app.TORRENT_SEED_TIME, - 'paused': app.TORRENT_PAUSED, - 'highBandwidth': app.TORRENT_HIGH_BANDWIDTH, - 'host': app.TORRENT_HOST, - 'rpcurl': app.TORRENT_RPCURL, - 'authType': app.TORRENT_AUTH_TYPE - }, - 'nzb': { - 'enabled': bool(app.USE_NZBS), - 'username': app.NZBGET_USERNAME, - 'password': app.NZBGET_PASSWORD, - # app.NZBGET_CATEGORY - # app.NZBGET_CATEGORY_BACKLOG - # app.NZBGET_CATEGORY_ANIME - # app.NZBGET_CATEGORY_ANIME_BACKLOG - 'host': app.NZBGET_HOST, - 'priority': app.NZBGET_PRIORITY - }, - 'layout': { - 'schedule': app.COMING_EPS_LAYOUT, - 'history': app.HISTORY_LAYOUT, - 'home': app.HOME_LAYOUT, - 'show': { - 'allSeasons': bool(app.DISPLAY_ALL_SEASONS), - 'specials': bool(app.DISPLAY_SHOW_SPECIALS) - } - } - } + if identifier and identifier != 'main': + return self._not_found('Config not 
found') + + config_data = NonEmptyDict() + config_data['anonRedirect'] = app.ANON_REDIRECT + config_data['animeSplitHome'] = app.ANIME_SPLIT_HOME + config_data['comingEpsSort'] = app.COMING_EPS_SORT + config_data['datePreset'] = app.DATE_PRESET + config_data['fuzzyDating'] = app.FUZZY_DATING + config_data['themeName'] = app.THEME_NAME + config_data['posterSortby'] = app.POSTER_SORTBY + config_data['posterSortdir'] = app.POSTER_SORTDIR + config_data['rootDirs'] = app.ROOT_DIRS + config_data['sortArticle'] = app.SORT_ARTICLE + config_data['timePreset'] = app.TIME_PRESET + config_data['trimZero'] = app.TRIM_ZERO + config_data['fanartBackground'] = app.FANART_BACKGROUND + config_data['fanartBackgroundOpacity'] = float(app.FANART_BACKGROUND_OPACITY or 0) + config_data['branch'] = app.BRANCH + config_data['commitHash'] = app.CUR_COMMIT_HASH + config_data['release'] = app.APP_VERSION + config_data['sslVersion'] = app.OPENSSL_VERSION + config_data['pythonVersion'] = sys.version + config_data['databaseVersion'] = NonEmptyDict() + config_data['databaseVersion']['major'] = app.MAJOR_DB_VERSION + config_data['databaseVersion']['minor'] = app.MINOR_DB_VERSION + config_data['os'] = platform.platform() + config_data['locale'] = '.'.join([text_type(loc or 'Unknown') for loc in app.LOCALE]) + config_data['localUser'] = app.OS_USER or 'Unknown' + config_data['programDir'] = app.PROG_DIR + config_data['configFile'] = app.CONFIG_FILE + config_data['dbFilename'] = db.dbFilename() + config_data['cacheDir'] = app.CACHE_DIR + config_data['logDir'] = app.LOG_DIR + config_data['appArgs'] = app.MY_ARGS + config_data['webRoot'] = app.WEB_ROOT + config_data['githubUrl'] = app.GITHUB_IO_URL + config_data['wikiUrl'] = app.WIKI_URL + config_data['sourceUrl'] = app.APPLICATION_URL + config_data['downloadUrl'] = app.DOWNLOAD_URL + config_data['subtitlesMulti'] = app.SUBTITLES_MULTI + config_data['namingForceFolders'] = app.NAMING_FORCE_FOLDERS + config_data['subtitles'] = NonEmptyDict() + config_data['subtitles']['enabled'] = bool(app.USE_SUBTITLES) + config_data['kodi'] = NonEmptyDict() + config_data['kodi']['enabled'] = bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY) + config_data['plex'] = NonEmptyDict() + config_data['plex']['server'] = NonEmptyDict() + config_data['plex']['server']['enabled'] = bool(app.USE_PLEX_SERVER) + config_data['plex']['server']['notify'] = NonEmptyDict() + config_data['plex']['server']['notify']['snatch'] = bool(app.PLEX_NOTIFY_ONSNATCH) + config_data['plex']['server']['notify']['download'] = bool(app.PLEX_NOTIFY_ONDOWNLOAD) + config_data['plex']['server']['notify']['subtitleDownload'] = bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) + + config_data['plex']['server']['updateLibrary'] = bool(app.PLEX_UPDATE_LIBRARY) + config_data['plex']['server']['host'] = app.PLEX_SERVER_HOST + config_data['plex']['server']['token'] = app.PLEX_SERVER_TOKEN + config_data['plex']['server']['username'] = app.PLEX_SERVER_USERNAME + config_data['plex']['server']['password'] = app.PLEX_SERVER_PASSWORD + config_data['plex']['client'] = NonEmptyDict() + config_data['plex']['client']['enabled'] = bool(app.USE_PLEX_CLIENT) + config_data['plex']['client']['username'] = app.PLEX_CLIENT_USERNAME + config_data['plex']['client']['password'] = app.PLEX_CLIENT_PASSWORD + config_data['plex']['client']['host'] = app.PLEX_CLIENT_HOST + config_data['emby'] = NonEmptyDict() + config_data['emby']['enabled'] = bool(app.USE_EMBY) + config_data['torrents'] = NonEmptyDict() + config_data['torrents']['enabled'] = bool(app.USE_TORRENTS) + 
config_data['torrents']['method'] = app.TORRENT_METHOD + config_data['torrents']['username'] = app.TORRENT_USERNAME + config_data['torrents']['password'] = app.TORRENT_PASSWORD + config_data['torrents']['label'] = app.TORRENT_LABEL + config_data['torrents']['labelAnime'] = app.TORRENT_LABEL_ANIME + config_data['torrents']['verifySSL'] = app.TORRENT_VERIFY_CERT + config_data['torrents']['path'] = app.TORRENT_PATH + config_data['torrents']['seedTime'] = app.TORRENT_SEED_TIME + config_data['torrents']['paused'] = app.TORRENT_PAUSED + config_data['torrents']['highBandwidth'] = app.TORRENT_HIGH_BANDWIDTH + config_data['torrents']['host'] = app.TORRENT_HOST + config_data['torrents']['rpcurl'] = app.TORRENT_RPCURL + config_data['torrents']['authType'] = app.TORRENT_AUTH_TYPE + config_data['nzb'] = NonEmptyDict() + config_data['nzb']['enabled'] = bool(app.USE_NZBS) + config_data['nzb']['username'] = app.NZBGET_USERNAME + config_data['nzb']['password'] = app.NZBGET_PASSWORD + # app.NZBGET_CATEGORY + # app.NZBGET_CATEGORY_BACKLOG + # app.NZBGET_CATEGORY_ANIME + # app.NZBGET_CATEGORY_ANIME_BACKLOG + config_data['nzb']['host'] = app.NZBGET_HOST + config_data['nzb']['priority'] = app.NZBGET_PRIORITY + config_data['layout'] = NonEmptyDict() + config_data['layout']['schedule'] = app.COMING_EPS_LAYOUT + config_data['layout']['history'] = app.HISTORY_LAYOUT + config_data['layout']['home'] = app.HOME_LAYOUT + config_data['layout']['show'] = NonEmptyDict() + config_data['layout']['show']['allSeasons'] = bool(app.DISPLAY_ALL_SEASONS) + config_data['layout']['show']['specials'] = bool(app.DISPLAY_SHOW_SPECIALS) + config_data['selectedRootIndex'] = int(app.SELECTED_ROOT) if app.SELECTED_ROOT else None + config_data['backlogOverview'] = NonEmptyDict() + config_data['backlogOverview']['period'] = app.BACKLOG_PERIOD + config_data['backlogOverview']['status'] = app.BACKLOG_STATUS + + if not identifier: + return self._paginate([config_data]) - if query and query not in config_data: - return self.api_finish(status=404, error='{key} not found'.format(key=query)) + if path_param: + if path_param not in config_data: + return self._bad_request('{key} is a invalid path'.format(key=path_param)) - self.api_finish(data=config_data[query] if query else config_data) + config_data = config_data[path_param] - def patch(self, *args, **kwargs): + return self._ok(data=config_data) + + def patch(self, identifier, *args, **kwargs): """Patch general configuration.""" + if not identifier: + return self._bad_request('Config identifier not specified') + + if identifier != 'main': + return self._not_found('Config not found') + data = json_decode(self.request.body) - done_data = {} - done_errors = [] - for key in data.keys(): - if key == 'anonRedirect': - app.ANON_REDIRECT = data['anonRedirect'] - done_data.setdefault('anonRedirect', app.ANON_REDIRECT) - # 'animeSplitHome': app.ANIME_SPLIT_HOME, - # 'comingEpsSort': app.COMING_EPS_SORT, - # 'datePreset': app.DATE_PRESET, - # 'fuzzyDating': app.FUZZY_DATING, - # 'themeName': app.THEME_NAME, - # 'posterSortby': app.POSTER_SORTBY, - # 'posterSortdir': app.POSTER_SORTDIR, - # 'rootDirs': app.ROOT_DIRS, - # 'sortArticle': app.SORT_ARTICLE, - # 'timePreset': app.TIME_PRESET, - # 'trimZero': app.TRIM_ZERO, - # 'fanartBackground': app.FANART_BACKGROUND, - # 'fanartBackgroundOpacity': app.FANART_BACKGROUND_OPACITY, - # 'branch': app.BRANCH, # @TODO: If branch change we should checkout new branch and if success return 200 otherwise return error - if key in ['commitHash', 'release', 'sslVersion', 
'pythonVersion', 'databaseVersion', 'os', 'locale', 'localUser', ]: - # This is for fields that are static within the API - # For example you shouldn't be able to change the OS - done_errors.append(key) - # 'programDir': app.PROG_DIR, - # 'configFile': app.CONFIG_FILE, - # 'dbFilename': db.dbFilename(), - # 'cacheDir': app.CACHE_DIR, - # 'logDir': app.LOG_DIR, - # 'appArgs': app.MY_ARGS, - # 'webRoot': app.WEB_ROOT, - # 'githubUrl': app.GITHUB_IO_URL, - # 'wikiUrl': app.WIKI_URL, - # 'sourceUrl': app.APPLICATION_URL, - # 'downloadUrl': app.DOWNLOAD_URL, - # 'subtitlesMulti': app.SUBTITLES_MULTI, - # 'namingForceFolders': app.NAMING_FORCE_FOLDERS, - # 'subtitles': { - # 'enabled': bool(app.USE_SUBTITLES) - # }, - # 'kodi': { - # 'enabled': bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY) - # }, - # 'plex': { - # 'server': { - # 'enabled': bool(app.USE_PLEX_SERVER and app.PLEX_UPDATE_LIBRARY) - # }, - # 'client': { - # 'enabled': False # Replace this with plex client code - # } - # }, - if key == 'emby': - done_data.setdefault('emby', {}) - if 'enabled' in data['emby'] and str(data['emby']['enabled']).lower() in ['true', 'false']: - # @TODO: All booleans should be saved as booleans - app.USE_EMBY = int(data['emby']['enabled']) - done_data['emby'].setdefault('enabled', bool(app.USE_EMBY)) - if key == 'torrents': - done_data.setdefault('torrents', {}) - if 'enabled' in data['torrents'] and str(data['torrents']['enabled']).lower() in ['true', 'false']: - # @TODO: All booleans should be saved as booleans - app.USE_TORRENTS = int(data['torrents']['enabled']) - done_data['torrents'].setdefault('enabled', bool(app.USE_TORRENTS)) - if 'username' in data['torrents']: - app.TORRENT_USERNAME = str(data['torrents']['username']) - done_data['torrents'].setdefault('username', app.TORRENT_USERNAME) - if 'password' in data['torrents']: - app.TORRENT_PASSWORD = str(data['torrents']['password']) - done_data['torrents'].setdefault('password', app.TORRENT_PASSWORD) - if 'label' in data['torrents']: - app.TORRENT_LABEL = str(data['torrents']['label']) - done_data['torrents'].setdefault('label', app.TORRENT_LABEL) - if 'labelAnime' in data['torrents']: - app.TORRENT_LABEL_ANIME = str(data['torrents']['labelAnime']) - done_data['torrents'].setdefault('labelAnime', app.TORRENT_LABEL_ANIME) - if 'verifySSL' in data['torrents'] and str(data['torrents']['verifySSL']).lower() in ['true', 'false']: - # @TODO: All booleans should be saved as booleans - app.TORRENT_VERIFY_CERT = int(data['torrents']['verifySSL']) - done_data['torrents'].setdefault('verifySSL', bool(app.TORRENT_VERIFY_CERT)) - if 'path' in data['torrents']: - app.TORRENT_PATH = str(data['torrents']['path']) - done_data['torrents'].setdefault('verifySSL', app.TORRENT_VERIFY_CERT) - # 'path': app.TORRENT_PATH, - # 'seedTime': app.TORRENT_SEED_TIME, - # 'paused': app.TORRENT_PAUSED, - # 'highBandwidth': app.TORRENT_HIGH_BANDWIDTH, - # 'host': app.TORRENT_HOST, - # 'rpcurl': app.TORRENT_RPCURL, - # 'authType': app.TORRENT_AUTH_TYPE - # if 'method' in data['torrents']: - # if 'username' in data['torrents']: - # if 'password' in data['torrents']: - # if 'label' in data['torrents']: - # if 'labelAnime' in data['torrents']: - # if 'verifySSL' in data['torrents']: - # if 'seedTime' in data['torrents']: - # if 'highBandwidth' in data['torrents']: - # if 'host' in data['torrents']: - # if 'rpcurl' in data['torrents']: - # if 'authType' in data['torrents']: - if key == 'layout': - done_data.setdefault('layout', {}) - if 'schedule' in data['layout']: - if 
data['layout']['schedule'] in ('poster', 'banner', 'list', 'calendar'): - if data['layout']['schedule'] == 'calendar': - app.COMING_EPS_SORT = 'date' - app.COMING_EPS_LAYOUT = data['layout']['schedule'] - else: - app.COMING_EPS_LAYOUT = 'banner' - done_data['layout'].setdefault('schedule', app.COMING_EPS_LAYOUT) - if 'history' in data['layout']: - if data['layout']['history'] in ('compact', 'detailed'): - app.HISTORY_LAYOUT = data['layout']['history'] - else: - app.HISTORY_LAYOUT = 'detailed' - done_data['layout'].setdefault('history', app.HISTORY_LAYOUT) - if 'home' in data['layout']: - if data['layout']['home'] in ('poster', 'small', 'banner', 'simple', 'coverflow'): - app.HOME_LAYOUT = data['layout']['home'] - else: - app.HOME_LAYOUT = 'poster' - done_data['layout'].setdefault('home', app.HOME_LAYOUT) - if 'show' in data['layout']: - done_data['layout'].setdefault('show', {}) - if 'allSeasons' in data['layout']['show'] and str(data['layout']['show']['allSeasons']).lower() in ['true', 'false']: - app.DISPLAY_ALL_SEASONS = int(data['layout']['show']['allSeasons']) - done_data['layout']['show'].setdefault('allSeasons', bool(app.DISPLAY_ALL_SEASONS)) - if 'specials' in data['layout']['show'] and str(data['layout']['show']['specials']).lower() in ['true', 'false']: - app.DISPLAY_SHOW_SPECIALS = int(data['layout']['show']['specials']) - done_data['layout']['show'].setdefault('specials', bool(app.DISPLAY_SHOW_SPECIALS)) + accepted = {} + ignored = {} + + for key, value in iter_nested_items(data): + patch_field = self.patches.get(key) + if patch_field and patch_field.patch(app, value): + set_nested_value(accepted, key, value) + else: + set_nested_value(ignored, key, value) + + if ignored: + log.warning('Config patch ignored %r', ignored) + # Make sure to update the config file after everything is updated app.instance.save_config() - if len(done_errors): - logger.log('Can\'t PATCH [' + ', '.join(done_errors) + '] since ' + ["it's a static field.", "they're static fields."][len(done_errors) > 1]) - self.api_finish(data=done_data) + self._ok(data=accepted) diff --git a/medusa/server/api/v2/episode.py b/medusa/server/api/v2/episode.py new file mode 100644 index 0000000000..2334ef4a04 --- /dev/null +++ b/medusa/server/api/v2/episode.py @@ -0,0 +1,63 @@ +# coding=utf-8 +"""Request handler for series and episodes.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.episode import Episode, EpisodeNumber +from medusa.tv.series import Series, SeriesIdentifier + + +class EpisodeHandler(BaseRequestHandler): + """Episodes request handler.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'episode' + #: identifier + identifier = ('episode_slug', r'[\w-]+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, series_slug, episode_slug, path_param): + """Query episode information. + + :param series_slug: series slug. 
E.g.: tvdb1234 + :param episode_slug: + :param path_param: + """ + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + if not episode_slug: + detailed = self._parse_boolean(self.get_argument('detailed', default=False)) + season = self._parse(self.get_argument('season', None), int) + data = [e.to_json(detailed=detailed) for e in series.get_all_episodes(season=season)] + return self._paginate(data, sort='airDate') + + episode_number = EpisodeNumber.from_slug(episode_slug) + if not episode_number: + return self._bad_request('Invalid episode number') + + episode = Episode.find_by_series_and_episode(series, episode_number) + if not episode: + return self._not_found('Episode not found') + + detailed = self._parse_boolean(self.get_argument('detailed', default=True)) + data = episode.to_json(detailed=detailed) + if path_param: + if path_param == 'metadata': + data = episode.metadata() if episode.is_location_valid() else {} + elif path_param in data: + data = data[path_param] + else: + return self._bad_request("Invalid path parameter '{0}'".format(path_param)) + + return self._ok(data=data) diff --git a/medusa/server/api/v2/log.py b/medusa/server/api/v2/log.py index a4ec38018f..c2bab136e3 100644 --- a/medusa/server/api/v2/log.py +++ b/medusa/server/api/v2/log.py @@ -3,54 +3,59 @@ import json import logging -from .base import BaseRequestHandler -from ....logger import LOGGING_LEVELS, filter_logline, read_loglines +from medusa.logger import LOGGING_LEVELS, filter_logline, read_loglines +from medusa.logger.adapters.style import BraceAdapter +from medusa.server.api.v2.base import BaseRequestHandler -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) class LogHandler(BaseRequestHandler): """Log request handler.""" - def get(self, log_level): - """Query logs. + #: resource name + name = 'log' + #: identifier + identifier = None + #: allowed HTTP methods + allowed_methods = ('GET', 'POST', ) + + def get(self): + """Query logs.""" + log_level = self.get_argument('level', 'INFO').upper() + if log_level not in LOGGING_LEVELS: + return self._bad_request('Invalid log level') - :param log_level: - :type log_level: str - """ - log_level = log_level or 'INFO' arg_page = self._get_page() arg_limit = self._get_limit() - min_level = LOGGING_LEVELS[log_level.upper()] + min_level = LOGGING_LEVELS[log_level] - data = [line.to_json() for line in read_loglines(max_lines=arg_limit + arg_page, - predicate=lambda l: filter_logline(l, min_level=min_level))] - start = (arg_page - 1) * arg_limit - end = start + arg_limit - data = data[start:end] + def data_generator(): + """Read log lines based on the specified criteria.""" + start = arg_limit * (arg_page - 1) + 1 + for l in read_loglines(start_index=start, max_lines=arg_limit * arg_page, + predicate=lambda li: filter_logline(li, min_level=min_level)): + yield l.to_json() - self.api_finish(data=data, headers={ - 'X-Pagination-Page': arg_page, - 'X-Pagination-Limit': arg_limit - }) + return self._paginate(data_generator=data_generator) - def delete(self, log_level='ERROR'): - """Delete logs. - - :param log_level: - """ - self.api_finish() - - def post(self, log_level): + def post(self): """Create a log line. By definition this method is NOT idempotent. 
""" data = json.loads(self.request.body) + if not data or not all([data.get('message')]): + return self._bad_request('Invalid request') + + data['level'] = data.get('level', 'INFO').upper() + if data['level'] not in LOGGING_LEVELS: + return self._bad_request('Invalid log level') + message = data['message'] args = data.get('args', []) - kwargs = data.get('kwargs', dict()) - level = LOGGING_LEVELS[data.get('level', 'ERROR').upper()] - logger.log(level, message, exc_info=False, *args, **kwargs) - self.api_finish(status=201) + kwargs = data.get('kwargs', {}) + level = LOGGING_LEVELS[data['level']] + log.log(level, message, exc_info=False, *args, **kwargs) + self._created() diff --git a/medusa/server/api/v2/series.py b/medusa/server/api/v2/series.py new file mode 100644 index 0000000000..851ed265aa --- /dev/null +++ b/medusa/server/api/v2/series.py @@ -0,0 +1,129 @@ +# coding=utf-8 +"""Request handler for series and episodes.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.tv.series import Series, SeriesIdentifier +from tornado.escape import json_decode + + +class SeriesHandler(BaseRequestHandler): + """Series request handler.""" + + #: resource name + name = 'series' + #: identifier + identifier = ('series_slug', r'\w+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', 'PATCH', 'DELETE', ) + + def get(self, series_slug, path_param=None): + """Query series information. + + :param series_slug: series slug. E.g.: tvdb1234 + :param path_param: + """ + arg_paused = self._parse_boolean(self.get_argument('paused', default=None)) + + def filter_series(current): + return arg_paused is None or current.paused == arg_paused + + if not series_slug: + detailed = self._parse_boolean(self.get_argument('detailed', default=False)) + data = [s.to_json(detailed=detailed) for s in Series.find_series(predicate=filter_series)] + return self._paginate(data, sort='title') + + identifier = SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(identifier, predicate=filter_series) + if not series: + return self._not_found('Series not found') + + detailed = self._parse_boolean(self.get_argument('detailed', default=True)) + data = series.to_json(detailed=detailed) + if path_param: + if path_param not in data: + return self._bad_request("Invalid path parameter'{0}'".format(path_param)) + data = data[path_param] + + return self._ok(data) + + def post(self, series_slug=None, path_param=None): + """Add a new series.""" + if series_slug is not None: + return self._bad_request('Series slug should not be specified') + + data = json_decode(self.request.body) + if not data or 'id' not in data: + return self._bad_request('Invalid series data') + + ids = {k: v for k, v in data['id'].items() if k != 'imdb'} + if len(ids) != 1: + return self._bad_request('Only 1 indexer identifier should be specified') + + identifier = SeriesIdentifier.from_slug('{slug}{id}'.format(slug=ids.keys()[0], id=ids.values()[0])) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if series: + return self._conflict('Series already exist added') + + series = Series.from_identifier(identifier) + if not Series.save_series(series): + return self._not_found('Series not found in the specified indexer') + + return self._created(series.to_json(), identifier=identifier.slug) + + def patch(self, series_slug, 
path_param=None): + """Patch series.""" + if not series_slug: + return self._method_not_allowed('Patching multiple series is not allowed') + + identifier = SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if not series: + return self._not_found('Series not found') + + data = json_decode(self.request.body) + indexer_id = data.get('id', {}).get(identifier.indexer.slug) + if indexer_id is not None and indexer_id != identifier.id: + return self._bad_request('Conflicting series identifier') + + done = {} + for key, value in data.items(): + if key == 'pause': + if value is True: + series.pause() + elif value is False: + series.unpause() + else: + return self._bad_request('Invalid request body: pause') + done[key] = value + + return self._ok(done) + + def delete(self, series_slug, path_param=None): + """Delete the series.""" + if not series_slug: + return self._method_not_allowed('Deleting multiple series is not allowed') + + identifier = SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if not series: + return self._not_found('Series not found') + + remove_files = self._parse_boolean(self.get_argument('remove-files', default=None)) + if not series.delete(remove_files): + return self._conflict('Unable to delete series') + + return self._no_content() diff --git a/medusa/server/api/v2/series_asset.py b/medusa/server/api/v2/series_asset.py new file mode 100644 index 0000000000..e397bc2642 --- /dev/null +++ b/medusa/server/api/v2/series_asset.py @@ -0,0 +1,36 @@ +# coding=utf-8 +"""Request handler for series assets.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.series import Series, SeriesIdentifier + + +class SeriesAssetHandler(BaseRequestHandler): + """Series Asset request handler.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'asset' + #: identifier + identifier = ('identifier', r'[a-zA-Z]+') + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, series_slug, identifier, *args, **kwargs): + """Get an asset.""" + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + asset_type = identifier or 'banner' + asset = series.get_asset(asset_type) + if not asset: + return self._not_found('Asset not found') + + self._ok(stream=asset.get_media(), content_type=asset.get_media_type()) diff --git a/medusa/server/api/v2/series_legacy.py b/medusa/server/api/v2/series_legacy.py new file mode 100644 index 0000000000..83106e749d --- /dev/null +++ b/medusa/server/api/v2/series_legacy.py @@ -0,0 +1,48 @@ +# coding=utf-8 +"""Request handler for series operations.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.series import Series, SeriesIdentifier + + +class SeriesLegacyHandler(BaseRequestHandler): + """To be removed/redesigned.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'legacy' + #: identifier + identifier = ('identifier', r'\w+') + #: path param + path_param = None + #: allowed HTTP 
methods + allowed_methods = ('GET', ) + + def get(self, series_slug, identifier): + """Query series information. + + :param series_slug: series slug. E.g.: tvdb1234 + :param identifier: + """ + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + if identifier == 'backlogged': + # TODO: revisit + allowed_qualities = self._parse(self.get_argument('allowed', default=None), str) + allowed_qualities = map(int, allowed_qualities.split(',')) if allowed_qualities else [] + preferred_qualities = self._parse(self.get_argument('preferred', default=None), str) + preferred_qualities = map(int, preferred_qualities.split(',')) if preferred_qualities else [] + new, existing = series.get_backlogged_episodes(allowed_qualities=allowed_qualities, + preferred_qualities=preferred_qualities) + data = {'new': new, 'existing': existing} + return self._ok(data=data) + + return self._bad_request('Invalid request') diff --git a/medusa/server/api/v2/series_operation.py b/medusa/server/api/v2/series_operation.py new file mode 100644 index 0000000000..7ea640f07a --- /dev/null +++ b/medusa/server/api/v2/series_operation.py @@ -0,0 +1,46 @@ +# coding=utf-8 +"""Request handler for series operations.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.series import Series, SeriesIdentifier +from tornado.escape import json_decode + + +class SeriesOperationHandler(BaseRequestHandler): + """Operation request handler for series.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'operation' + #: identifier + identifier = None + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = ('POST', ) + + def post(self, series_slug): + """Query series information. + + :param series_slug: series slug. E.g.: tvdb1234 + """ + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + data = json_decode(self.request.body) + if not data or not all([data.get('type')]) or len(data) != 1: + return self._bad_request('Invalid request body') + + if data['type'] == 'ARCHIVE_EPISODES': + if series.set_all_episodes_archived(final_status_only=True): + return self._created() + return self._no_content() + + return self._bad_request('Invalid operation') diff --git a/medusa/server/api/v2/show.py b/medusa/server/api/v2/show.py deleted file mode 100644 index 49261df1f6..0000000000 --- a/medusa/server/api/v2/show.py +++ /dev/null @@ -1,197 +0,0 @@ -# coding=utf-8 -"""Request handler for shows.""" - -from tornado.escape import json_decode -from .base import BaseRequestHandler -from .... 
import app -from ....indexers.indexer_config import indexerConfig, reverse_mappings -from ....show.show import Show -from ....show_queue import ShowQueueActions - - -class EpisodeIdentifier(object): - """Episode Identifier.""" - - def __init__(self, season, episode, absolute_episode, air_date): - """Default constructor.""" - self.season = season - self.episode = episode - self.absolute_episode = absolute_episode - self.air_date = air_date - - def __bool__(self): - """Boolean function.""" - return (self.season or self.episode or self.absolute_episode or self.air_date) is not None - - __nonzero__ = __bool__ - - -class ShowHandler(BaseRequestHandler): - """Shows request handler.""" - - def set_default_headers(self): - """Set default CORS headers.""" - super(ShowHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS') - - def get(self, show_indexer, show_id, season, episode, absolute_episode, air_date, query): - """Query show information. - - :param show_indexer: - :param show_id: - :type show_id: str - :param season: - :param episode: - :param absolute_episode: - :param air_date: - :param query: - """ - # @TODO: This should be completely replaced with show_id - indexer_cfg = indexerConfig.get(reverse_mappings.get('{0}_id'.format(show_indexer))) if show_indexer else None - show_indexer = indexer_cfg['id'] if indexer_cfg else None - indexerid = self._parse(show_id) - season = self._parse(season) - episode = self._parse(episode) - absolute_episode = self._parse(absolute_episode) - air_date = self._parse_date(air_date) - - # @TODO: https://github.com/SiCKRAGETV/SiCKRAGE/pull/2558 - - arg_paused = self._parse(self.get_argument('paused', default=None)) - if show_id is not None: - tv_show = Show.find(app.showList, indexerid, show_indexer) - if not self._match(tv_show, arg_paused): - return self.api_finish(status=404, error='Show not found') - - ep_id = EpisodeIdentifier(season, episode, absolute_episode, air_date) - if ep_id or query == 'episodes': - return self._handle_episode(tv_show, ep_id, query) - - return self._handle_detailed_show(tv_show, query) - - data = [s.to_json(detailed=self.get_argument('detailed', default=False)) for s in app.showList if self._match(s, arg_paused)] - return self._paginate(data, 'title') - - @staticmethod - def _match(tv_show, paused): - return tv_show and (paused is None or tv_show.paused == paused) - - def _handle_detailed_show(self, tv_show, query): - if query: - if query == 'backlogged': - allowed_qualities = self._parse(self.get_argument('allowed', default=None), str) - allowed_qualities = map(int, allowed_qualities.split(',')) if allowed_qualities else [] - preferred_qualities = self._parse(self.get_argument('preferred', default=None), str) - preferred_qualities = map(int, preferred_qualities.split(',')) if preferred_qualities else [] - new, existing = tv_show.get_backlogged_episodes(allowed_qualities=allowed_qualities, - preferred_qualities=preferred_qualities) - data = {'new': new, 'existing': existing} - elif query == 'archiveEpisodes': - data = {'archived': 'true' if tv_show.set_all_episodes_archived(final_status_only=True) else 'false'} - elif query == 'queue': - action, message = app.show_queue_scheduler.action.get_queue_action(tv_show) - data = { - 'action': ShowQueueActions.names[action], - 'message': message, - } if action is not None else dict() - elif query in tv_show.to_json(): - data = data[query] - else: - return self.api_finish(status=400, error="Invalid resource path 
'{0}'".format(query)) - else: - data = tv_show.to_json() - self.api_finish(data=data) - - def _handle_episode(self, tv_show, ep_id, query): - if (ep_id.episode or ep_id.absolute_episode or ep_id.air_date) is not None: - tv_episode = self._find_tv_episode(tv_show=tv_show, ep_id=ep_id) - if not tv_episode: - return self.api_finish(status=404, error='Episode not found') - return self._handle_detailed_episode(tv_episode, query) - - tv_episodes = tv_show.get_all_episodes(season=ep_id.season) - data = [e.to_json(detailed=False) for e in tv_episodes] - - return self._paginate(data, 'airDate') - - @staticmethod - def _find_tv_episode(tv_show, ep_id): - """Find Episode based on specified criteria. - - :param tv_show: - :param ep_id: - :return: - :rtype: medusa.tv.Episode or tuple(int, string) - """ - if ep_id.season is not None and ep_id.episode is not None: - tv_episode = tv_show.get_episode(season=ep_id.season, episode=ep_id.episode, should_cache=False) - elif ep_id.absolute_episode is not None: - tv_episode = tv_show.get_episode(absolute_number=ep_id.absolute_episode, should_cache=False) - elif ep_id.air_date: - tv_episode = tv_show.get_episode(air_date=ep_id.air_date, should_cache=False) - else: - # if this happens then it's a bug! - raise ValueError - - if tv_episode and tv_episode.loaded: - return tv_episode - - def _handle_detailed_episode(self, tv_episode, query): - data = tv_episode.to_json() - if query: - if query == 'metadata': - data = tv_episode.metadata() if tv_episode.is_location_valid() else dict() - elif query in data: - data = data[query] - else: - return self.api_finish(status=400, error="Invalid resource path '{0}'".format(query)) - - return self.api_finish(data=data) - - def put(self, show_id): - """Replace whole show object. - - :param show_id: - :type show_id: str - """ - return self.api_finish() - - def patch(self, show_indexer, show_id, *args, **kwargs): - """Update show object.""" - # @TODO: This should be completely replaced with show_id - indexer_cfg = indexerConfig.get(reverse_mappings.get('{0}_id'.format(show_indexer))) if show_indexer else None - show_indexer = indexer_cfg['id'] if indexer_cfg else None - indexerid = self._parse(show_id) - - if show_id is not None: - tv_show = Show.find(app.showList, indexerid, show_indexer) - print(tv_show) - - data = json_decode(self.request.body) - done_data = {} - done_errors = [] - for key in data.keys(): - if key == 'pause' and str(data['pause']).lower() in ['true', 'false']: - error, _ = Show.pause(indexerid, data['pause']) - if error is not None: - self.api_finish(error=error) - else: - done_data['pause'] = data['pause'] - if len(done_errors): - print('Can\'t PATCH [' + ', '.join(done_errors) + '] since ' + ["it's a static field.", "they're static fields."][len(done_errors) > 1]) - self.api_finish(data=done_data) - else: - return self.api_finish(status=404, error='Show not found') - - def post(self): - """Add a show.""" - return self.api_finish() - - def delete(self, show_id): - """Delete a show. 
- - :param show_id: - :type show_id: str - """ - error, show = Show.delete(indexer_id=show_id, remove_files=self.get_argument('remove_files', default=False)) - return self.api_finish(error=error, data=show) diff --git a/medusa/server/api/v2/status.py b/medusa/server/api/v2/status.py deleted file mode 100644 index 07b19b03e9..0000000000 --- a/medusa/server/api/v2/status.py +++ /dev/null @@ -1,16 +0,0 @@ -# coding=utf-8 -"""Request handler for server status.""" - -from .base import BaseRequestHandler - - -class StatusHandler(BaseRequestHandler): - """Status request handler.""" - - def get(self, query=''): - """Query server status. - - :param query: - :type query: str - """ - self.api_finish() diff --git a/medusa/server/core.py b/medusa/server/core.py index 0d74a4a2dc..718c39fe20 100644 --- a/medusa/server/core.py +++ b/medusa/server/core.py @@ -5,6 +5,20 @@ import os import threading +from medusa.server.api.v2.alias import AliasHandler +from medusa.server.api.v2.alias_source import ( + AliasSourceHandler, + AliasSourceOperationHandler, +) +from medusa.server.api.v2.auth import AuthHandler +from medusa.server.api.v2.base import NotFoundHandler +from medusa.server.api.v2.config import ConfigHandler +from medusa.server.api.v2.episode import EpisodeHandler +from medusa.server.api.v2.log import LogHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.server.api.v2.series_asset import SeriesAssetHandler +from medusa.server.api.v2.series_legacy import SeriesLegacyHandler +from medusa.server.api.v2.series_operation import SeriesOperationHandler from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop from tornado.web import Application, RedirectHandler, StaticFileHandler, url @@ -17,28 +31,39 @@ def get_apiv2_handlers(base): """Return api v2 handlers.""" - from .api.v2.config import ConfigHandler - from .api.v2.log import LogHandler - from .api.v2.show import ShowHandler - from .api.v2.auth import AuthHandler - from .api.v2.asset import AssetHandler - from .api.v2.base import NotFoundHandler - - show_id = r'(?P[a-z]+)(?P\d+)' - # This has to accept season of 1-4 as some seasons are years. 
For example Formula 1 - ep_id = r'(?:(?:s(?P\d{1,4})(?:e(?P\d{1,2}))?)|(?:e(?P\d{1,3}))|(?P\d{4}\-\d{2}\-\d{2}))' - query = r'(?P[\w]+)' - query_extended = r'(?P[\w \(\)%]+)' # This also accepts the space char, () and % - log_level = r'(?P[a-zA-Z]+)' - asset_group = r'(?P[a-zA-Z0-9]+)' - return [ - (r'{base}/show(?:/{show_id}(?:/{ep_id})?(?:/{query})?)?/?'.format(base=base, show_id=show_id, ep_id=ep_id, query=query), ShowHandler), - (r'{base}/config(?:/{query})?/?'.format(base=base, query=query), ConfigHandler), - (r'{base}/log(?:/{log_level})?/?'.format(base=base, log_level=log_level), LogHandler), - (r'{base}/authenticate(/?)'.format(base=base), AuthHandler), - (r'{base}/asset(?:/{asset_group})(?:/{query})?/?'.format(base=base, asset_group=asset_group, query=query_extended), AssetHandler), - (r'{base}(/?.*)'.format(base=base), NotFoundHandler) + # Order: Most specific to most generic + # /api/v2/series/tvdb1234/episode + EpisodeHandler.create_app_handler(base), + + # /api/v2/series/tvdb1234/operation + SeriesOperationHandler.create_app_handler(base), + # /api/v2/series/tvdb1234/asset + SeriesAssetHandler.create_app_handler(base), + # /api/v2/series/tvdb1234/legacy + SeriesLegacyHandler.create_app_handler(base), # To be removed + # /api/v2/series/tvdb1234 + SeriesHandler.create_app_handler(base), + + # /api/v2/config + ConfigHandler.create_app_handler(base), + + # /api/v2/log + LogHandler.create_app_handler(base), + + # /api/v2/alias-source/xem/operation + AliasSourceOperationHandler.create_app_handler(base), + # /api/v2/alias-source + AliasSourceHandler.create_app_handler(base), + + # /api/v2/alias + AliasHandler.create_app_handler(base), + + # /api/v2/authenticate + AuthHandler.create_app_handler(base), + + # Always keep this last! + NotFoundHandler.create_app_handler(base) ] diff --git a/medusa/server/web/config/general.py b/medusa/server/web/config/general.py index b817d0fb47..f814d5101d 100644 --- a/medusa/server/web/config/general.py +++ b/medusa/server/web/config/general.py @@ -64,8 +64,9 @@ def saveGeneral(self, log_dir=None, log_nr=5, log_size=1, web_port=None, notify_ calendar_unprotected=None, calendar_icons=None, debug=None, ssl_verify=None, no_restart=None, coming_eps_missed_range=None, fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None, indexer_timeout=None, download_url=None, rootDir=None, theme_name=None, default_page=None, - git_reset=None, git_reset_branches=None, git_username=None, git_password=None, display_all_seasons=None, subliminal_log=None, - privacy_level='normal', fanart_background=None, fanart_background_opacity=None, dbdebug=None): + git_reset=None, git_reset_branches=None, git_auth_type=0, git_username=None, git_password=None, git_token=None, + display_all_seasons=None, subliminal_log=None, privacy_level='normal', fanart_background=None, fanart_background_opacity=None, + dbdebug=None): results = [] # Misc @@ -91,11 +92,15 @@ def saveGeneral(self, log_dir=None, log_nr=5, log_size=1, web_port=None, notify_ app.ANON_REDIRECT = anon_redirect app.PROXY_SETTING = proxy_setting app.PROXY_INDEXERS = config.checkbox_to_value(proxy_indexers) + app.GIT_AUTH_TYPE = try_int(git_auth_type) app.GIT_USERNAME = git_username app.GIT_PASSWORD = git_password + app.GIT_TOKEN = git_token app.GIT_RESET = config.checkbox_to_value(git_reset) app.GIT_RESET_BRANCHES = helpers.ensure_list(git_reset_branches) - app.GIT_PATH = git_path + if app.GIT_PATH != git_path: + app.GIT_PATH = git_path + config.change_GIT_PATH() app.GIT_REMOTE = git_remote 
app.CALENDAR_UNPROTECTED = config.checkbox_to_value(calendar_unprotected) app.CALENDAR_ICONS = config.checkbox_to_value(calendar_icons) @@ -129,7 +134,12 @@ def saveGeneral(self, log_dir=None, log_nr=5, log_size=1, web_port=None, notify_ # Validate github credentials try: - github_client.authenticate(app.GIT_USERNAME, app.GIT_PASSWORD) + if app.GIT_AUTH_TYPE == 0: + github_client.authenticate(app.GIT_USERNAME, app.GIT_PASSWORD) + else: + github = github_client.token_authenticate(app.GIT_TOKEN) + if app.GIT_USERNAME and app.GIT_USERNAME != github_client.get_user(gh=github): + app.GIT_USERNAME = github_client.get_user(gh=github) except (GithubException, IOError): logger.log('Error while validating your Github credentials.', logger.WARNING) diff --git a/medusa/server/web/config/search.py b/medusa/server/web/config/search.py index 392cd4351e..64759bd1c4 100644 --- a/medusa/server/web/config/search.py +++ b/medusa/server/web/config/search.py @@ -45,7 +45,8 @@ def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_usernam torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None, torrent_rpcurl=None, torrent_auth_type=None, ignore_words=None, torrent_checker_frequency=None, preferred_words=None, undesired_words=None, trackers_list=None, require_words=None, - ignored_subs_list=None, ignore_und_subs=None, cache_trimming=None, max_cache_age=None): + ignored_subs_list=None, ignore_und_subs=None, cache_trimming=None, max_cache_age=None, + torrent_seed_location=None): """ Save Search related settings """ @@ -86,7 +87,7 @@ def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_usernam config.change_DOWNLOAD_PROPERS(download_propers) app.PROPERS_SEARCH_DAYS = try_int(propers_search_days, 2) app.REMOVE_FROM_CLIENT = config.checkbox_to_value(remove_from_client) - app.CHECK_PROPERS_INTERVAL = check_propers_interval + config.change_PROPERS_FREQUENCY(check_propers_interval) app.ALLOW_HIGH_PRIORITY = config.checkbox_to_value(allow_high_priority) @@ -125,6 +126,7 @@ def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_usernam app.TORRENT_HOST = config.clean_url(torrent_host) app.TORRENT_RPCURL = torrent_rpcurl app.TORRENT_AUTH_TYPE = torrent_auth_type + app.TORRENT_SEED_LOCATION = torrent_seed_location.rstrip('/\\') app.instance.save_config() diff --git a/medusa/server/web/core/error_logs.py b/medusa/server/web/core/error_logs.py index 1ca21fc8d3..036f2daa0d 100644 --- a/medusa/server/web/core/error_logs.py +++ b/medusa/server/web/core/error_logs.py @@ -3,17 +3,22 @@ from __future__ import unicode_literals +import logging from datetime import datetime, timedelta from mako.filters import html_escape + +from medusa import ui +from medusa.classes import ErrorViewer, WarningViewer +from medusa.issue_submitter import IssueSubmitter +from medusa.logger import filter_logline, read_loglines +from medusa.server.web.core.base import PageTemplate, WebRoot +from medusa.version_checker import CheckVersion + from six import text_type from tornroutes import route -from .base import PageTemplate, WebRoot -from .... 
import logger, ui -from ....classes import ErrorViewer, WarningViewer -from ....issue_submitter import IssueSubmitter -from ....logger import filter_logline, read_loglines -from ....version_checker import CheckVersion + +log = logging.getLogger(__name__) log_name_filters = { None: html_escape(''), @@ -75,31 +80,31 @@ def _create_menu(self, level): { # Clear Errors 'title': 'Clear Errors', 'path': 'errorlogs/clearerrors/', - 'requires': self._has_errors() and level == logger.ERROR, + 'requires': self._has_errors() and level == logging.ERROR, 'icon': 'ui-icon ui-icon-trash' }, { # Clear Warnings 'title': 'Clear Warnings', - 'path': 'errorlogs/clearerrors/?level={level}'.format(level=logger.WARNING), - 'requires': self._has_warnings() and level == logger.WARNING, + 'path': 'errorlogs/clearerrors/?level={level}'.format(level=logging.WARNING), + 'requires': self._has_warnings() and level == logging.WARNING, 'icon': 'ui-icon ui-icon-trash' }, { # Submit Errors 'title': 'Submit Errors', 'path': 'errorlogs/submit_errors/', - 'requires': self._has_errors() and level == logger.ERROR, + 'requires': self._has_errors() and level == logging.ERROR, 'class': 'submiterrors', 'confirm': True, 'icon': 'ui-icon ui-icon-arrowreturnthick-1-n' }, ] - def index(self, level=logger.ERROR, **kwargs): + def index(self, level=logging.ERROR, **kwargs): """Default index page.""" try: level = int(level) except (TypeError, ValueError): - level = logger.ERROR + level = logging.ERROR t = PageTemplate(rh=self, filename='errorlogs.mako') return t.render(header='Logs & Errors', title='Logs & Errors', topmenu='system', @@ -113,17 +118,17 @@ def _has_errors(): def _has_warnings(): return bool(WarningViewer.errors) - def clearerrors(self, level=logger.ERROR): + def clearerrors(self, level=logging.ERROR): """Clear the errors or warnings.""" # @TODO: Replace this with DELETE /api/v2/log/{logLevel} or /api/v2/log/ - if int(level) == logger.WARNING: + if int(level) == logging.WARNING: WarningViewer.clear() else: ErrorViewer.clear() return self.redirect('/errorlogs/viewlog/') - def viewlog(self, min_level=logger.INFO, log_filter=None, log_search=None, max_lines=1000, log_period='one_day', + def viewlog(self, min_level=logging.INFO, log_filter=None, log_search=None, max_lines=1000, log_period='one_day', text_view=None, **kwargs): """View the log given the specified filters.""" # @TODO: Replace index with this or merge it so ?search=true or ?query={queryString} enables this "view" diff --git a/medusa/server/web/home/add_shows.py b/medusa/server/web/home/add_shows.py index 753ca400d5..1686e99500 100644 --- a/medusa/server/web/home/add_shows.py +++ b/medusa/server/web/home/add_shows.py @@ -7,6 +7,8 @@ import os import re +from requests import RequestException + from requests.compat import unquote_plus from simpleanidb import REQUEST_HOT from six import iteritems @@ -76,7 +78,7 @@ def searchIndexersForShowName(search_term, lang=None, indexer=None): l_indexer_api_parms['language'] = lang l_indexer_api_parms['custom_ui'] = classes.AllShowsListUI try: - t = indexerApi(indexer).indexer(**l_indexer_api_parms) + indexer_api = indexerApi(indexer).indexer(**l_indexer_api_parms) except IndexerUnavailable as msg: logger.log(u'Could not initialize Indexer {indexer}: {error}'. 
format(indexer=indexerApi(indexer).name, error=msg)) @@ -86,11 +88,11 @@ def searchIndexersForShowName(search_term, lang=None, indexer=None): search_terms, indexerApi(indexer).name), logger.DEBUG) for searchTerm in search_terms: try: - indexer_results = t[searchTerm] + indexer_results = indexer_api[searchTerm] # add search results results.setdefault(indexer, []).extend(indexer_results) - except IndexerException as msg: - logger.log(u'Error searching for show: {error}'.format(error=msg)) + except IndexerException as e: + logger.log(u'Error searching for show: {error}'.format(error=e.message)) for i, shows in iteritems(results): final_results.extend({(indexerApi(i).name, i, indexerApi(i).config['show_url'], int(show['id']), @@ -167,17 +169,6 @@ def massAddTable(self, rootDir=None): if not (indexer_id and show_name): (indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path) - # default to TVDB if indexer was not detected - if show_name and not (indexer or indexer_id): - (_, idxr, i) = helpers.search_indexer_for_show_id(show_name, indexer, indexer_id) - - # set indexer and indexer_id from found info - if not indexer and idxr: - indexer = idxr - - if not indexer_id and i: - indexer_id = i - cur_dir['existing_info'] = (indexer_id, show_name, indexer) if indexer_id and Show.find(app.showList, indexer_id): @@ -317,8 +308,7 @@ def popularShows(self): try: recommended_shows = ImdbPopular().fetch_popular_shows() - except Exception as e: - # print traceback.format_exc() + except (RequestException, StandardError) as e: recommended_shows = None return t.render(title="Popular Shows", header="Popular Shows", @@ -402,7 +392,7 @@ def addShowByID(self, indexer_id, show_name=None, indexer="TVDB", which_series=N if Show.find(app.showList, int(indexer_id)): return - # Sanitize the paramater allowed_qualities and preferred_qualities. As these would normally be passed as lists + # Sanitize the parameter allowed_qualities and preferred_qualities. As these would normally be passed as lists if any_qualities: any_qualities = any_qualities.split(',') else: diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index c777848582..9231ab3740 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -9,42 +9,116 @@ from datetime import date, datetime import adba -from requests.compat import quote_plus, unquote_plus -from six import iteritems -from tornroutes import route -from traktor import MissingTokenException, TokenExpiredException, TraktApi, TraktException -from ..core import PageTemplate, WebRoot -from .... 
import app, clients, config, db, helpers, logger, notifiers, nzbget, providers, sab, subtitles, ui -from ....black_and_white_list import BlackAndWhiteList, short_group_names -from ....common import (DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, - SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, UNAIRED, WANTED, cpu_presets, statusStrings) -from ....failed_history import prepare_failed_name -from ....helper.common import enabled_providers, try_int -from ....helper.exceptions import CantRefreshShowException, CantUpdateShowException, ShowDirectoryNotFoundException, ex -from ....indexers.indexer_api import indexerApi -from ....indexers.indexer_config import INDEXER_TVDBV2 -from ....indexers.indexer_exceptions import IndexerException, IndexerShowNotFoundInLanguage -from ....providers.generic_provider import GenericProvider -from ....sbdatetime import sbdatetime -from ....scene_exceptions import get_all_scene_exceptions, get_scene_exceptions, update_scene_exceptions -from ....scene_numbering import ( - get_scene_absolute_numbering, get_scene_absolute_numbering_for_show, - get_scene_numbering, get_scene_numbering_for_show, - get_xem_absolute_numbering_for_show, get_xem_numbering_for_show, - set_scene_numbering, xem_refresh + +from medusa import ( + app, + config, + db, + helpers, + logger, + notifiers, + providers, + subtitles, + ui, +) +from medusa.black_and_white_list import ( + BlackAndWhiteList, + short_group_names, +) +from medusa.clients import torrent +from medusa.clients.nzb import ( + nzbget, + sab, +) +from medusa.common import ( + DOWNLOADED, + FAILED, + IGNORED, + Overview, + Quality, + SKIPPED, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, + UNAIRED, + WANTED, + cpu_presets, + statusStrings, +) +from medusa.failed_history import prepare_failed_name +from medusa.helper.common import ( + enabled_providers, + pretty_file_size, + try_int, +) +from medusa.helper.exceptions import ( + CantRefreshShowException, + CantUpdateShowException, + ShowDirectoryNotFoundException, + ex, ) -from ....search.manual import ( - SEARCH_STATUS_FINISHED, SEARCH_STATUS_QUEUED, SEARCH_STATUS_SEARCHING, collect_episodes_from_search_thread, - get_episode, get_provider_cache_results, update_finished_search_queue_item +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_config import INDEXER_TVDBV2 +from medusa.indexers.indexer_exceptions import ( + IndexerException, + IndexerShowNotFoundInLanguage, ) -from ....search.queue import ( - BacklogQueueItem, FailedQueueItem, ForcedSearchQueueItem, ManualSnatchQueueItem +from medusa.providers.generic_provider import GenericProvider +from medusa.sbdatetime import sbdatetime +from medusa.scene_exceptions import ( + get_all_scene_exceptions, + get_scene_exceptions, + update_scene_exceptions, +) +from medusa.scene_numbering import ( + get_scene_absolute_numbering, + get_scene_absolute_numbering_for_show, + get_scene_numbering, + get_scene_numbering_for_show, + get_xem_absolute_numbering_for_show, + get_xem_numbering_for_show, + set_scene_numbering, + xem_refresh, +) +from medusa.search.manual import ( + SEARCH_STATUS_FINISHED, + SEARCH_STATUS_QUEUED, + SEARCH_STATUS_SEARCHING, + collect_episodes_from_search_thread, + get_episode, + get_provider_cache_results, + update_finished_search_queue_item, +) +from medusa.search.queue import ( + BacklogQueueItem, + FailedQueueItem, + ForcedSearchQueueItem, + ManualSnatchQueueItem, +) +from medusa.server.web.core import ( + PageTemplate, + WebRoot, +) +from medusa.show.history import History +from 
medusa.show.show import Show +from medusa.system.restart import Restart +from medusa.system.shutdown import Shutdown +from medusa.version_checker import CheckVersion + +from requests.compat import ( + quote_plus, + unquote_plus, +) +from six import iteritems + +from tornroutes import route + +from traktor import ( + MissingTokenException, + TokenExpiredException, + TraktApi, + TraktException, ) -from ....show.history import History -from ....show.show import Show -from ....system.restart import Restart -from ....system.shutdown import Shutdown -from ....version_checker import CheckVersion @route('/home(/?.*)') @@ -58,17 +132,30 @@ def _genericMessage(self, subject, message): def index(self): t = PageTemplate(rh=self, filename='home.mako') + selected_root = int(app.SELECTED_ROOT) + shows_dir = None + if selected_root is not None and app.ROOT_DIRS: + backend_pieces = app.ROOT_DIRS.split('|') + backend_dirs = backend_pieces[1:] + shows_dir = backend_dirs[selected_root] if selected_root != -1 else None + + shows = [] if app.ANIME_SPLIT_HOME: - shows = [] anime = [] for show in app.showList: + if shows_dir and not show._location.startswith(shows_dir): + continue if show.is_anime: anime.append(show) else: shows.append(show) show_lists = [['Shows', shows], ['Anime', anime]] else: - show_lists = [['Shows', app.showList]] + for show in app.showList: + if shows_dir and not show._location.startswith(shows_dir): + continue + shows.append(show) + show_lists = [['Shows', shows]] stats = self.show_statistics() return t.render(title='Home', header='Show List', topmenu='home', show_lists=show_lists, show_stat=stats[0], max_download_count=stats[1], controller='home', action='index') @@ -189,9 +276,9 @@ def haveTORRENT(): def testSABnzbd(host=None, username=None, password=None, apikey=None): host = config.clean_url(host) - connection, acces_msg = sab.getSabAccesMethod(host) + connection, acces_msg = sab.get_sab_access_method(host) if connection: - authed, auth_msg = sab.testAuthentication(host, username, password, apikey) # @UnusedVariable + authed, auth_msg = sab.test_authentication(host, username, password, apikey) # @UnusedVariable if authed: return 'Success. Connected and authenticated' else: @@ -213,7 +300,7 @@ def testTorrent(torrent_method=None, host=None, username=None, password=None): # @TODO: Move this to the validation section of each PATCH/PUT method for torrents host = config.clean_url(host) - client = clients.get_client_class(torrent_method) + client = torrent.get_client_class(torrent_method) _, acces_msg = client(host, username, password).test_authentication() @@ -1161,7 +1248,7 @@ def titler(x): try: main_db_con = db.DBConnection() episode_status_result = main_db_con.action( - b'SELECT date, action, provider, resource ' + b'SELECT date, action, provider, resource, size ' b'FROM history ' b'WHERE showid = ? ' b'AND season = ? 
' @@ -1175,14 +1262,17 @@ def titler(x): i['status'], i['quality'] = Quality.split_composite_status(i['action']) i['action_date'] = sbdatetime.sbfdatetime(datetime.strptime(str(i['date']), History.date_format), show_seconds=True) i['resource_file'] = os.path.basename(i['resource']) + i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A' i['status_name'] = statusStrings[i['status']] + provider = None if i['status'] == DOWNLOADED: i['status_color_style'] = 'downloaded' elif i['status'] in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): i['status_color_style'] = 'snatched' + provider = providers.get_provider_class(GenericProvider.make_id(i['provider'])) elif i['status'] == FAILED: i['status_color_style'] = 'failed' - provider = providers.get_provider_class(GenericProvider.make_id(i['provider'])) + provider = providers.get_provider_class(GenericProvider.make_id(i['provider'])) if provider is not None: i['provider_name'] = provider.name i['provider_img_link'] = 'images/providers/' + provider.image_name() @@ -1202,8 +1292,9 @@ def titler(x): provider_result['status_highlight'] = 'failed' elif any([item for item in episode_history if all([provider_result['name'] in item['resource'], - item['provider'] in (provider_result['provider'],), - item['status'] in snatched_statuses]) + item['provider'] in provider_result['provider'], + item['status'] in snatched_statuses, + item['size'] == provider_result['size']]) ]): provider_result['status_highlight'] = 'snatched' else: @@ -1322,26 +1413,29 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q exceptions = exceptions_list or set() anidb_failed = False - errors = [] + errors = 0 if show is None: - error_string = 'Invalid show ID: {show}'.format(show=show) + error_string = 'No show was selected' if directCall: - return [error_string] + errors += 1 + return errors else: return self._genericMessage('Error', error_string) show_obj = Show.find(app.showList, int(show)) if not show_obj: - error_string = 'Unable to find the specified show: {show}'.format(show=show) + error_string = 'Unable to find the specified show ID: {show}'.format(show=show) if directCall: - return [error_string] + errors += 1 + return errors else: return self._genericMessage('Error', error_string) show_obj.exceptions = get_scene_exceptions(show_obj.indexerid, show_obj.indexer) + # If user set quality_preset remove all preferred_qualities if try_int(quality_preset, None): preferred_qualities = [] @@ -1359,9 +1453,10 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q try: anime = adba.Anime(app.ADBA_CONNECTION, name=show_obj.name) groups = anime.get_groups() - except Exception as msg: - ui.notifications.error('Unable to retreive Fansub Groups from AniDB.') - logger.log(u'Unable to retreive Fansub Groups from AniDB. Error is {0}'.format(str(msg)), logger.DEBUG) + except Exception as e: + errors += 1 + logger.log(u'Unable to retreive Fansub Groups from AniDB. 
Error:{error}'.format + (error=e.message), logger.WARNING) with show_obj.lock: show = show_obj @@ -1384,7 +1479,8 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q subtitles = config.checkbox_to_value(subtitles) do_update = False - if show_obj.lang != indexer_lang: + # In mass edit, we can't change language so we need to check if indexer_lang is set + if indexer_lang and show_obj.lang != indexer_lang: msg = ( '{{status}} {language}' ' for {indexer_name} show {show_id}'.format( @@ -1402,11 +1498,13 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q indexer_lang, ) except IndexerShowNotFoundInLanguage: + errors += 1 status = 'Could not change language to' - except IndexerException as error: + except IndexerException as e: + errors += 1 status = u'Failed getting show in' - msg += u' Please try again later. Error: {err}'.format( - err=error, + msg += u' Please try again later. Error: {error}'.format( + error=e.message, ) else: language = indexer_lang @@ -1415,8 +1513,6 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q finally: indexer_lang = language msg = msg.format(status=status) - if log_level >= logger.WARNING: - errors.append(msg) logger.log(msg, log_level) if scene == show_obj.scene and anime == show_obj.anime: @@ -1472,8 +1568,10 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q show_obj.flatten_folders = flatten_folders try: app.show_queue_scheduler.action.refreshShow(show_obj) - except CantRefreshShowException as msg: - errors.append('Unable to refresh this show: {error}'.format(error=msg)) + except CantRefreshShowException as e: + errors += 1 + logger.log("Unable to refresh show '{show}': {error}".format + (show=show_obj.name, error=e.message), logger.WARNING) show_obj.paused = paused show_obj.scene = scene @@ -1482,10 +1580,10 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q show_obj.subtitles = subtitles show_obj.air_by_date = air_by_date show_obj.default_ep_status = int(defaultEpStatus) + show_obj.dvd_order = dvd_order if not directCall: show_obj.lang = indexer_lang - show_obj.dvd_order = dvd_order show_obj.rls_ignore_words = rls_ignore_words.strip() show_obj.rls_require_words = rls_require_words.strip() @@ -1493,28 +1591,39 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q old_location = os.path.normpath(show_obj._location) new_location = os.path.normpath(location) if old_location != new_location: - logger.log('{old} != {new}'.format(old=old_location, new=new_location), logger.DEBUG) # pylint: disable=protected-access - if not os.path.isdir(location) and not app.CREATE_MISSING_SHOW_DIRS: - errors.append('New location {location} does not exist'.format(location=location)) + changed_location = True + logger.log('Changing show location to: {new}'.format(new=new_location), logger.INFO) + if not os.path.isdir(new_location): + if app.CREATE_MISSING_SHOW_DIRS: + logger.log(u"Show directory doesn't exist, creating it", logger.INFO) + try: + os.mkdir(new_location) + except OSError as e: + errors += 1 + changed_location = False + logger.log(u"Unable to create the show directory '{location}. Error: {error}".format + (location=new_location, error=e.message or e.strerror), logger.WARNING) + else: + logger.log(u"New show directory created", logger.INFO) + helpers.chmod_as_parent(new_location) + else: + logger.log("New location '{location}' does not exist. 
" + "Enable setting 'Create missing show dirs'".format + (location=location), logger.WARNING) - # don't bother if we're going to update anyway - elif not do_update: - # change it + # Save new location to DB only if we changed it + if changed_location: + show_obj.location = new_location + + if (do_update or changed_location) and os.path.isdir(new_location): try: - show_obj.location = location - try: - app.show_queue_scheduler.action.refreshShow(show_obj) - except CantRefreshShowException as msg: - errors.append('Unable to refresh this show:{error}'.format(error=msg)) - # grab updated info from TVDB - # show_obj.load_episodes_from_indexer() - # rescan the episodes in the new folder - except ShowDirectoryNotFoundException: - errors.append('The folder at {location} doesn\'t contain a tvshow.nfo - ' - 'copy your files to that folder before you change the directory in Medusa.'.format - (location=location)) - - # save it to the DB + app.show_queue_scheduler.action.refreshShow(show_obj) + except CantRefreshShowException as e: + errors += 1 + logger.log("Unable to refresh show '{show}': {error}".format + (show=show_obj.name, error=e.message), logger.WARNING) + + # Save all settings changed while in show_obj.lock show_obj.save_to_db() # force the update @@ -1522,22 +1631,28 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q try: app.show_queue_scheduler.action.updateShow(show_obj) time.sleep(cpu_presets[app.CPU_PRESET]) - except CantUpdateShowException as msg: - errors.append('Unable to update show: {0}'.format(str(msg))) + except CantUpdateShowException as e: + errors += 1 + logger.log("Unable to update show '{show}': {error}".format + (show=show_obj.name, error=e.message), logger.WARNING) if do_update_exceptions: try: - update_scene_exceptions(show_obj.indexerid, show_obj.indexer, exceptions) # @UndefinedVdexerid) + update_scene_exceptions(show_obj.indexerid, show_obj.indexer, exceptions) time.sleep(cpu_presets[app.CPU_PRESET]) except CantUpdateShowException: - errors.append('Unable to force an update on scene exceptions of the show.') + errors += 1 + logger.log("Unable to force an update on scene exceptions for show '{show}': {error}".format + (show=show_obj.name, error=e.message), logger.WARNING) if do_update_scene_numbering: try: xem_refresh(show_obj.indexerid, show_obj.indexer) time.sleep(cpu_presets[app.CPU_PRESET]) except CantUpdateShowException: - errors.append('Unable to force an update on scene numbering of the show.') + errors += 1 + logger.log("Unable to force an update on scene numbering for show '{show}': {error}".format + (show=show_obj.name, error=e.message), logger.WARNING) # Must erase cached results when toggling scene numbering self.erase_cache(show_obj) @@ -1546,11 +1661,10 @@ def editShow(self, show=None, location=None, allowed_qualities=None, preferred_q return errors if errors: - ui.notifications.error( - '{num} error{s} while saving changes:'.format(num=len(errors), s='s' if len(errors) > 1 else ''), - '
    <ul>\n{list}\n</ul>
'.format(list='\n'.join(['
  <li>{items}</li>
  • '.format(items=error_item) - for error_item in errors]))) + ui.notifications.error('Errors', '{num} error{s} while saving changes. Please check logs'.format + (num=errors, s='s' if errors > 1 else '')) + logger.log(u"Finished editing show: {show}".format(show=show_obj.name), logger.DEBUG) return self.redirect('/home/displayShow?show={show}'.format(show=show)) def erase_cache(self, show_obj): @@ -1776,7 +1890,8 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): if not ep_obj: return self._genericMessage('Error', 'Episode couldn\'t be retrieved') - if int(status) in [WANTED, FAILED]: + status = int(status) + if status in [WANTED, FAILED]: # figure out what episodes are wanted so we can backlog them if ep_obj.season in segments: segments[ep_obj.season].append(ep_obj) @@ -1791,7 +1906,7 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): continue snatched_qualities = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST - if all([int(status) in Quality.DOWNLOADED, + if all([status in Quality.DOWNLOADED, ep_obj.status not in snatched_qualities + Quality.DOWNLOADED + [IGNORED], not os.path.isfile(ep_obj.location)]): logger.log(u'Refusing to change status of {episode} to DOWNLOADED ' @@ -1799,22 +1914,25 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): (episode=cur_ep), logger.WARNING) continue - if all([int(status) == FAILED, + if all([status == FAILED, ep_obj.status not in snatched_qualities + Quality.DOWNLOADED + Quality.ARCHIVED]): logger.log(u'Refusing to change status of {episode} to FAILED ' u'because it\'s not SNATCHED/DOWNLOADED'.format(episode=cur_ep), logger.WARNING) continue - if all([int(status) == WANTED, + if all([status == WANTED, ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED]): logger.log(u'Removing release_name for episode as as episode was changed to WANTED') ep_obj.release_name = '' - if ep_obj.manually_searched and int(status) == WANTED: + if ep_obj.manually_searched and status == WANTED: logger.log(u"Resetting 'manually searched' flag as episode was changed to WANTED", logger.DEBUG) ep_obj.manually_searched = False - ep_obj.status = int(status) + # Only in failed_history we set to FAILED. + # We need current snatched quality to log 'quality' column in failed action in history + if status != FAILED: + ep_obj.status = status # mass add to database sql_l.append(ep_obj.get_sql()) @@ -1824,9 +1942,9 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data) if app.USE_TRAKT and app.TRAKT_SYNC_WATCHLIST: - if int(status) in [WANTED, FAILED]: + if status in [WANTED, FAILED]: upd = 'Add' - elif int(status) in [IGNORED, SKIPPED] + Quality.DOWNLOADED + Quality.ARCHIVED: + elif status in [IGNORED, SKIPPED] + Quality.DOWNLOADED + Quality.ARCHIVED: upd = 'Remove' logger.log(u'{action} episodes, showid: indexerid {show.indexerid}, Title {show.name} to Watchlist'.format @@ -1839,7 +1957,7 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): main_db_con = db.DBConnection() main_db_con.mass_action(sql_l) - if int(status) == WANTED and not show_obj.paused: + if status == WANTED and not show_obj.paused: msg = 'Backlog was automatically started for the following seasons of {show}:
    '.format(show=show_obj.name) msg += '
      ' @@ -1856,12 +1974,12 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): if segments: ui.notifications.message('Backlog started', msg) - elif int(status) == WANTED and show_obj.paused: + elif status == WANTED and show_obj.paused: logger.log(u'Some episodes were set to wanted, but {show} is paused. ' u'Not adding to Backlog until show is unpaused'.format (show=show_obj.name)) - if int(status) == FAILED: + if status == FAILED: msg = 'Retrying Search was automatically started for the following season of {show}:
      '.format(show=show_obj.name) msg += '
        ' diff --git a/medusa/server/web/home/post_process.py b/medusa/server/web/home/post_process.py index 0e507ef00f..95296a5bcb 100644 --- a/medusa/server/web/home/post_process.py +++ b/medusa/server/web/home/post_process.py @@ -3,7 +3,9 @@ from __future__ import unicode_literals from six import string_types + from tornroutes import route + from .handler import Home from ..core import PageTemplate from .... import process_tv @@ -12,16 +14,17 @@ @route('/home/postprocess(/?.*)') class HomePostProcess(Home): + def __init__(self, *args, **kwargs): super(HomePostProcess, self).__init__(*args, **kwargs) def index(self): t = PageTemplate(rh=self, filename='home_postprocess.mako') - return t.render(title='Post Processing', header='Post Processing', topmenu='home', controller='home', action='postProcess') + return t.render(title='Post Processing', header='Post Processing', topmenu='home', + controller='home', action='postProcess') def processEpisode(self, proc_dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None, is_priority=None, delete_on='0', failed='0', proc_type='auto', ignore_subs=None, *args, **kwargs): - nzb_name = nzbName def argToBool(argument): if isinstance(argument, string_types): @@ -39,11 +42,12 @@ def argToBool(argument): if not proc_dir: return self.redirect('/home/postprocess/') else: - nzb_name = ss(nzb_name) if nzb_name else nzb_name + resource_name = ss(nzbName) if nzbName else None - result = process_tv.processDir( - ss(proc_dir), nzb_name, process_method=process_method, force=argToBool(force), - is_priority=argToBool(is_priority), delete_on=argToBool(delete_on), failed=argToBool(failed), proc_type=type, ignore_subs=argToBool(ignore_subs) + result = process_tv.ProcessResult(ss(proc_dir), process_method=process_method).process( + resource_name=resource_name, force=argToBool(force), is_priority=argToBool(is_priority), + delete_on=argToBool(delete_on), failed=argToBool(failed), proc_type=type, + ignore_subs=argToBool(ignore_subs) ) if quiet is not None and int(quiet) == 1: diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index eed88c855a..8f3e2327e8 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -11,7 +11,7 @@ from ..core import PageTemplate, WebRoot from ..home import Home -from .... import app, db, helpers, logger, post_processor, subtitles, ui +from .... 
import app, db, helpers, logger, network_timezones, post_processor, sbdatetime, subtitles, ui from ....common import ( Overview, Quality, SNATCHED, ) @@ -23,6 +23,7 @@ CantUpdateShowException, ) from ....helpers import is_media_file +from ....network_timezones import app_timezone from ....show.show import Show from ....tv import Episode @@ -341,40 +342,75 @@ def backlogOverview(self): show_cats = {} show_sql_results = {} + backlog_periods = { + 'all': None, + 'one_day': datetime.timedelta(days=1), + 'three_days': datetime.timedelta(days=3), + 'one_week': datetime.timedelta(days=7), + 'one_month': datetime.timedelta(days=30), + } + backlog_period = backlog_periods.get(app.BACKLOG_PERIOD) + + backlog_status = { + 'all': [Overview.QUAL, Overview.WANTED], + 'quality': [Overview.QUAL], + 'wanted': [Overview.WANTED] + } + selected_backlog_status = backlog_status.get(app.BACKLOG_STATUS) + main_db_con = db.DBConnection() for cur_show in app.showList: + if cur_show.paused: + continue + ep_counts = { Overview.WANTED: 0, Overview.QUAL: 0, - Overview.GOOD: 0, } ep_cats = {} sql_results = main_db_con.select( """ - SELECT e.status, e.season, e.episode, e.name, e.airdate, e.manually_searched, e.status, s.quality + SELECT e.status, e.season, e.episode, e.name, e.airdate, e.manually_searched FROM tv_episodes as e - JOIN tv_shows as s WHERE e.season IS NOT NULL AND - s.paused = 0 AND - e.showid = s.indexer_id AND - s.indexer_id = ? + e.showid = ? ORDER BY e.season DESC, e.episode DESC """, [cur_show.indexerid] ) - - for cur_result in sql_results: + filtered_episodes = [] + backlogged_episodes = [dict(row) for row in sql_results] + for cur_result in backlogged_episodes: cur_ep_cat = cur_show.get_overview(cur_result[b'status'], backlog_mode=True, manually_searched=cur_result[b'manually_searched']) if cur_ep_cat: - ep_cats[u'{ep}'.format(ep=episode_num(cur_result[b'season'], cur_result[b'episode']))] = cur_ep_cat - ep_counts[cur_ep_cat] += 1 + if cur_ep_cat in selected_backlog_status and cur_result[b'airdate'] != 1: + air_date = datetime.datetime.fromordinal(cur_result[b'airdate']) + if air_date.year >= 1970 or cur_show.network: + air_date = sbdatetime.sbdatetime.convert_to_setting( + network_timezones.parse_date_time(cur_result[b'airdate'], + cur_show.airs, + cur_show.network)) + if backlog_period and air_date < datetime.datetime.now(app_timezone) - backlog_period: + continue + else: + air_date = None + episode_string = u'{ep}'.format(ep=(episode_num(cur_result[b'season'], + cur_result[b'episode']) or + episode_num(cur_result[b'season'], + cur_result[b'episode'], + numbering='absolute'))) + ep_cats[episode_string] = cur_ep_cat + ep_counts[cur_ep_cat] += 1 + cur_result[b'airdate'] = air_date + cur_result[b'episode_string'] = episode_string + filtered_episodes.append(cur_result) show_counts[cur_show.indexerid] = ep_counts show_cats[cur_show.indexerid] = ep_cats - show_sql_results[cur_show.indexerid] = sql_results + show_sql_results[cur_show.indexerid] = filtered_episodes return t.render( showCounts=show_counts, showCats=show_cats, @@ -425,6 +461,9 @@ def massEdit(self, toEdit=None): air_by_date_all_same = True last_air_by_date = None + dvd_order_all_same = True + last_dvd_order = None + root_dir_list = [] for cur_show in show_list: @@ -490,6 +529,12 @@ def massEdit(self, toEdit=None): else: last_air_by_date = cur_show.air_by_date + if dvd_order_all_same: + if last_dvd_order not in (None, cur_show.dvd_order): + dvd_order_all_same = False + else: + last_dvd_order = cur_show.dvd_order + default_ep_status_value = 
last_default_ep_status if default_ep_status_all_same else None paused_value = last_paused if paused_all_same else None anime_value = last_anime if anime_all_same else None @@ -499,14 +544,15 @@ def massEdit(self, toEdit=None): scene_value = last_scene if scene_all_same else None sports_value = last_sports if sports_all_same else None air_by_date_value = last_air_by_date if air_by_date_all_same else None + dvd_order_value = last_dvd_order if dvd_order_all_same else None root_dir_list = root_dir_list - return t.render(showList=toEdit, showNames=show_names, default_ep_status_value=default_ep_status_value, + return t.render(showList=toEdit, showNames=show_names, default_ep_status_value=default_ep_status_value, dvd_order_value=dvd_order_value, paused_value=paused_value, anime_value=anime_value, flatten_folders_value=flatten_folders_value, quality_value=quality_value, subtitles_value=subtitles_value, scene_value=scene_value, sports_value=sports_value, air_by_date_value=air_by_date_value, root_dir_list=root_dir_list, title='Mass Edit', header='Mass Edit', topmenu='manage') - def massEditSubmit(self, paused=None, default_ep_status=None, + def massEditSubmit(self, paused=None, default_ep_status=None, dvd_order=None, anime=None, sports=None, scene=None, flatten_folders=None, quality_preset=None, subtitles=None, air_by_date=None, allowed_qualities=None, preferred_qualities=None, toEdit=None, *args, **kwargs): @@ -521,10 +567,9 @@ def massEditSubmit(self, paused=None, default_ep_status=None, end_dir = kwargs['new_root_dir_{index}'.format(index=which_index)] dir_map[kwargs[cur_arg]] = end_dir - show_ids = toEdit.split('|') - errors = [] + show_ids = toEdit.split('|') if toEdit else [] + errors = 0 for cur_show in show_ids: - cur_errors = [] show_obj = Show.find(app.showList, int(cur_show)) if not show_obj: continue @@ -573,6 +618,12 @@ def massEditSubmit(self, paused=None, default_ep_status=None, new_air_by_date = True if air_by_date == 'enable' else False new_air_by_date = 'on' if new_air_by_date else 'off' + if dvd_order == 'keep': + new_dvd_order = show_obj.dvd_order + else: + new_dvd_order = True if dvd_order == 'enable' else False + new_dvd_order = 'on' if new_dvd_order else 'off' + if flatten_folders == 'keep': new_flatten_folders = show_obj.flatten_folders else: @@ -593,31 +644,18 @@ def massEditSubmit(self, paused=None, default_ep_status=None, exceptions_list = [] - cur_errors += self.editShow(cur_show, new_show_dir, allowed_qualities, - preferred_qualities, exceptions_list, - defaultEpStatus=new_default_ep_status, - flatten_folders=new_flatten_folders, - paused=new_paused, sports=new_sports, - subtitles=new_subtitles, anime=new_anime, - scene=new_scene, air_by_date=new_air_by_date, - directCall=True) - - if cur_errors: - logger.log(u'Errors: {errors}'.format(errors=cur_errors), logger.ERROR) - errors.append( - '{show}:\n
          <ul>{errors}</ul>
        '.format( - show=show_obj.name, - errors=' '.join(['
      <li>{error}</li>
      • '.format(error=error) - for error in cur_errors]) - ) - ) + errors += self.editShow(cur_show, new_show_dir, allowed_qualities, + preferred_qualities, exceptions_list, + defaultEpStatus=new_default_ep_status, + flatten_folders=new_flatten_folders, + paused=new_paused, sports=new_sports, dvd_order=new_dvd_order, + subtitles=new_subtitles, anime=new_anime, + scene=new_scene, air_by_date=new_air_by_date, + directCall=True) + if errors: - ui.notifications.error( - '{num} error{s} while saving changes:'.format( - num=len(errors), - s='s' if len(errors) > 1 else ''), - ' '.join(errors) - ) + ui.notifications.error('Errors', '{num} error{s} while saving changes. Please check logs'.format + (num=errors, s='s' if errors > 1 else '')) return self.redirect('/manage/') @@ -674,30 +712,18 @@ def massUpdate(self, toUpdate=None, toRefresh=None, toRename=None, toDelete=None ui.notifications.error('Errors encountered', '
        \n'.join(errors)) - def message_detail(title, items): - """ - Create an unordered list of items with a title. - :return: The message if items else '' - """ - return '' if not items else """ -
        - {title} -
        -
          - {list} -
        - """.format(title=title, - list='\n'.join(['
      <li>{item}</li>
      • '.format(item=cur_item) - for cur_item in items])) - message = '' - message += message_detail('Updates', updates) - message += message_detail('Refreshes', refreshes) - message += message_detail('Renames', renames) - message += message_detail('Subtitles', subtitles) + if updates: + message += '\nUpdates: {0}'.format(len(updates)) + if refreshes: + message += '\nRefreshes: {0}'.format(len(refreshes)) + if renames: + message += '\nRenames: {0}'.format(len(renames)) + if subtitles: + message += '\nSubtitles: {0}'.format(len(subtitles)) if message: - ui.notifications.message('The following actions were queued:', message) + ui.notifications.message('Queued actions:', message) return self.redirect('/manage/') diff --git a/medusa/show/coming_episodes.py b/medusa/show/coming_episodes.py index ebc6977d61..19b3b9e449 100644 --- a/medusa/show/coming_episodes.py +++ b/medusa/show/coming_episodes.py @@ -18,11 +18,12 @@ from datetime import date, timedelta +from medusa.helpers.quality import get_quality_string +from medusa.tv.series import SeriesIdentifier from .. import app from ..common import IGNORED, Quality, UNAIRED, WANTED from ..db import DBConnection from ..helper.common import dateFormat, timeFormat -from ..helper.quality import get_quality_string from ..network_timezones import parse_date_time from ..sbdatetime import sbdatetime @@ -113,6 +114,7 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P results = [dict(result) for result in results] for index, item in enumerate(results): + item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id'])) results[index]['localtime'] = sbdatetime.convert_to_setting( parse_date_time(item['airdate'], item['airs'], item['network'])) diff --git a/medusa/show/recommendations/imdb.py b/medusa/show/recommendations/imdb.py index eb1f59ca6a..1c016cbab4 100644 --- a/medusa/show/recommendations/imdb.py +++ b/medusa/show/recommendations/imdb.py @@ -4,11 +4,15 @@ import os import posixpath import re -import traceback + from datetime import date -from bs4 import BeautifulSoup +from imdbpie import imdbpie + +from requests import RequestException + from simpleanidb import Anidb + from .recommended import RecommendedShow from ... 
import app, helpers, logger from ...indexers.indexer_config import INDEXER_TVDBV2 @@ -60,48 +64,31 @@ def fetch_popular_shows(self): """Get popular show information from IMDB.""" popular_shows = [] - response = helpers.get_url(self.url, session=self.session, params=self.params, - headers={'Referer': 'http://akas.imdb.com/'}, returns='response') - if not response or not response.text: - return None - - soup = BeautifulSoup(response.text, 'html5lib') - results = soup.find('div', class_='lister-list') - rows = results.find_all('div', class_='lister-item mode-advanced') - - for row in rows: - show = {} - - image_div = row.find('div', class_='lister-item-image float-left') - if image_div: - image = image_div.find('img') - show['image_url_large'] = self.change_size(image['loadlate']) - show['image_path'] = posixpath.join('images', 'imdb_popular', - os.path.basename(show['image_url_large'])) - # self.cache_image(show['image_url_large']) - - content_div = row.find('div', class_='lister-item-content') - if content_div: - show_info = content_div.find('a') - show['name'] = show_info.get_text() - show['imdb_url'] = 'http://www.imdb.com' + show_info['href'] - show['imdb_tt'] = row.find('div', class_='ribbonize')['data-tconst'] - show['year'] = content_div.find('span', class_='lister-item-year text-muted unbold').get_text()[1:5] - - rating_div = content_div.find('div', class_='ratings-bar') - if rating_div: - rating_strong = rating_div.find('strong') - if rating_strong: - show['rating'] = rating_strong.get_text() - - votes_p = content_div.find('p', class_='sort-num_votes-visible') - if votes_p: - show['votes'] = votes_p.find('span', {'name': 'nv'}).get_text().replace(',', '') - - text_p = content_div.find('p', class_='text-muted') - if text_p: - show['outline'] = text_p.get_text(strip=True) - + imdb_api = imdbpie.Imdb() + imdb_result = imdb_api.popular_shows() + + for imdb_show in imdb_result: + show = dict() + imdb_tt = imdb_show['tconst'] + + if imdb_tt: + show['imdb_tt'] = imdb_show['tconst'] + show_details = imdb_api.get_title_by_id(imdb_tt) + + if show_details: + show['year'] = getattr(show_details, 'year') + show['name'] = getattr(show_details, 'title') + show['image_url_large'] = getattr(show_details, 'cover_url') + show['image_path'] = posixpath.join('images', 'imdb_popular', + os.path.basename(show['image_url_large'])) + show['imdb_url'] = 'http://www.imdb.com/title/{imdb_tt}'.format(imdb_tt=imdb_tt) + show['votes'] = getattr(show_details, 'votes', 0) + show['outline'] = getattr(show_details, 'plot_outline', 'Not available') + show['rating'] = getattr(show_details, 'rating', 0) + else: + continue + + if all([show['year'], show['name'], show['imdb_tt']]): popular_shows.append(show) result = [] @@ -110,9 +97,10 @@ def fetch_popular_shows(self): recommended_show = self._create_recommended_show(show) if recommended_show: result.append(recommended_show) - except Exception: - logger.log(u'Could not parse IMDB show, with exception: {0!r}'.format - (traceback.format_exc()), logger.WARNING) + except RequestException: + logger.log(u'Could not connect to indexers to check if you already have ' + u'this show in your library: {show} ({year})'.format + (show=show['name'], year=show['name']), logger.WARNING) return result diff --git a/medusa/show/recommendations/trakt.py b/medusa/show/recommendations/trakt.py index 172634402e..6431fc9aa9 100644 --- a/medusa/show/recommendations/trakt.py +++ b/medusa/show/recommendations/trakt.py @@ -16,33 +16,64 @@ # along with Medusa. If not, see . 
from __future__ import unicode_literals +import os +import time import requests from simpleanidb import Anidb from traktor import (TokenExpiredException, TraktApi, TraktException) -from tvdbapiv2 import (ApiClient, AuthenticationApi, SeriesApi) +from tvdbapiv2.exceptions import ApiException from .recommended import RecommendedShow from ... import app, logger from ...helper.common import try_int from ...helper.exceptions import MultipleShowObjectsException, ex +from ...indexers.indexer_api import indexerApi from ...indexers.indexer_config import INDEXER_TVDBV2 -def get_tvdbv2_api(): - """Initiate the tvdb api v2.""" - api_base_url = 'https://api.thetvdb.com' +class MissingPosterList(list): + """Smart custom list, with a cache expiration. - # client_id = 'username' # (optional! Only required for the /user routes) - # client_secret = 'pass' # (optional! Only required for the /user routes) - apikey = '0629B785CE550C8D' + A list used to store the trakt shows that do not have a poster on tvdb. This will prevent searches for posters + that have recently been searched using the tvdb's api, and resulted in a 404. + """ + + def __init__(self, items=None, cache_timeout=3600, implicit_clean=False): + """Initialize the MissingPosterList. + + :param items: Provide the initial list. + :param cache_timeout: Timeout after which the item expires. + :param implicit_clean: If enabled, run the clean() method, to check for expired items. Else you'll have to run + this periodically. + """ + list.__init__(self, items or []) + self.cache_timeout = cache_timeout + self.implicit_clean = implicit_clean + + def append(self, item): + """Add new items to the list.""" + if self.implicit_clean: + self.clean() + super(MissingPosterList, self).append((int(time.time()), item)) + + def clean(self): + """Use the cache_timeout to remove expired items.""" + new_list = [_ for _ in self if _[0] + self.cache_timeout > int(time.time())] + self.__init__(new_list, self.cache_timeout, self.implicit_clean) + + def has(self, value): + """Check if the value is in the list. + + We need a smarter method to check if an item is already in the list. This will return a list with items that + match the value. + :param value: The value to check for. + :return: A list of tuples with matches. For example: (141234234, '12342'). 
+ """ + if self.implicit_clean: + self.clean() + return [_ for _ in self if _[1] == value] - authentication_string = {'apikey': apikey, 'username': '', 'userpass': ''} - unauthenticated_client = ApiClient(api_base_url) - auth_api = AuthenticationApi(unauthenticated_client) - access_token = auth_api.login_post(authentication_string) - auth_client = ApiClient(api_base_url, 'Authorization', 'Bearer ' + access_token.token) - series_api = SeriesApi(auth_client) - return series_api +missing_posters = MissingPosterList(cache_timeout=3600 * 24 * 3) # Cache 3 days class TraktPopular(object): @@ -58,7 +89,7 @@ def __init__(self): self.recommender = "Trakt Popular" self.default_img_src = 'trakt-default.png' self.anidb = Anidb(cache_dir=app.CACHE_DIR) - self.tvdb_api_v2 = get_tvdbv2_api() + self.tvdb_api_v2 = indexerApi(INDEXER_TVDBV2).indexer() def _create_recommended_show(self, show_obj): """Create the RecommendedShow object from the returned showobj.""" @@ -78,10 +109,20 @@ def _create_recommended_show(self, show_obj): use_default = None image = None try: - image = self.tvdb_api_v2.series_id_images_query_get(show_obj['show']['ids']['tvdb'], key_type='poster').data[0].file_name - except Exception: + if not missing_posters.has(show_obj['show']['ids']['tvdb']): + image = self.check_cache_for_poster(show_obj['show']['ids']['tvdb']) or \ + self.tvdb_api_v2.series_api.series_id_images_query_get(show_obj['show']['ids']['tvdb'], key_type='poster').data[0].file_name + else: + logger.log('CACHE: Missing poster on TheTVDB for show %s' % (show_obj['show']['title']), logger.INFO) + use_default = self.default_img_src + except ApiException as e: + use_default = self.default_img_src + if getattr(e, 'status', None) == 404: + logger.log('Missing poster on TheTVDB for show %s' % (show_obj['show']['title']), logger.INFO) + missing_posters.append(show_obj['show']['ids']['tvdb']) + except Exception as e: use_default = self.default_img_src - logger.log('Missing poster on TheTVDB for show %s' % (show_obj['show']['title']), logger.DEBUG) + logger.log('Missing poster on TheTVDB, cause: %r' % e, logger.DEBUG) rec_show.cache_image('http://thetvdb.com/banners/{0}'.format(image), default=use_default) # As the method below requires allot of resources, i've only enabled it when @@ -148,6 +189,9 @@ def fetch_popular_shows(self, page_url=None, trakt_list=None): # pylint: disabl shows = self.fetch_and_refresh_token(trakt_api, page_url + limit_show + 'extended=full,images') or [] + # Let's trigger a cache cleanup. 
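As a quick illustration of the MissingPosterList defined above: it is an ordinary list of (timestamp, value) tuples with a time-to-live, so recently failed poster lookups can be recorded, queried and expired. This is only a sketch; the import path follows the file touched in this diff and the TVDB id is an arbitrary example.

    from medusa.show.recommendations.trakt import MissingPosterList

    # Remember shows whose poster lookup 404'd, for one hour.
    recent_misses = MissingPosterList(cache_timeout=3600, implicit_clean=True)

    recent_misses.append(289590)        # stored internally as (int(time.time()), 289590)
    if recent_misses.has(289590):       # returns the matching (timestamp, value) tuples
        pass                            # skip another TheTVDB poster request for now

    recent_misses.clean()               # drops entries older than cache_timeout seconds

With implicit_clean=True the expiry check also runs on every append() and has() call, matching the behaviour described in the class docstring.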
+ missing_posters.clean() + for show in shows: try: if 'show' not in show: @@ -170,3 +214,11 @@ def fetch_popular_shows(self, page_url=None, trakt_list=None): # pylint: disabl raise return blacklist, trending_shows, removed_from_medusa + + def check_cache_for_poster(self, tvdb_id): + """Verify if we already have a poster downloaded for this show.""" + for image_file_name in os.listdir(os.path.abspath(os.path.join(app.CACHE_DIR, 'images', self.cache_subfolder))): + if os.path.isfile(os.path.abspath(os.path.join(app.CACHE_DIR, 'images', self.cache_subfolder, image_file_name))): + if str(tvdb_id) == image_file_name.split('-')[0]: + return image_file_name + return False diff --git a/medusa/show/show.py b/medusa/show/show.py index 3a8d8f4511..dba7a269fb 100644 --- a/medusa/show/show.py +++ b/medusa/show/show.py @@ -62,13 +62,19 @@ def find(shows, indexer_id, indexer=None): :return: The desired show if found, ``None`` if not found :throw: ``MultipleShowObjectsException`` if multiple shows match the provided ``indexer_id`` """ - + from medusa.indexers.indexer_config import EXTERNAL_IMDB, EXTERNAL_TRAKT if indexer_id is None or shows is None or len(shows) == 0: return None indexer_ids = [indexer_id] if not isinstance(indexer_id, list) else indexer_id results = [show for show in shows if (indexer is None or show.indexer == indexer) and show.indexerid in indexer_ids] + # if can't find with supported indexers try with IMDB and TRAKT + if not results: + results = [show for show in shows + if show.imdb_id and show.imdb_id == indexer_id and indexer == EXTERNAL_IMDB or + show.externals.get('trakt_id', None) == indexer_id and indexer == EXTERNAL_TRAKT] + if not results: return None diff --git a/medusa/show_name_helpers.py b/medusa/show_name_helpers.py index c4239c75d0..62d2981965 100644 --- a/medusa/show_name_helpers.py +++ b/medusa/show_name_helpers.py @@ -21,9 +21,11 @@ import re from six import string_types -from . import app, common, logger + +from . import app, logger + from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .scene_exceptions import get_scene_exceptions + resultFilters = [ "(dir|sub|nfo)fix", @@ -94,48 +96,6 @@ def filterBadReleases(name, parse=True): return True -def allPossibleShowNames(show, season=-1): - """ - Figures out every possible variation of the name for a particular show. - - Includes indexer name, and any scene exception names, and country code - at the end of the name (e.g. "Show Name (AU)". 
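For the Show.find() fallback added in medusa/show/show.py above, a minimal usage sketch; the indexer id and the IMDb/Trakt ids below are illustrative placeholders only, not values taken from this change:

    from medusa import app
    from medusa.indexers.indexer_config import EXTERNAL_IMDB, EXTERNAL_TRAKT
    from medusa.show.show import Show

    # Regular lookup by a supported indexer id behaves as before.
    series = Show.find(app.showList, 121361)

    # New: an IMDb or Trakt id can also resolve a show, provided the show object
    # carries the matching external id (show.imdb_id / show.externals['trakt_id']).
    by_imdb = Show.find(app.showList, 'tt0944947', indexer=EXTERNAL_IMDB)
    by_trakt = Show.find(app.showList, 1390, indexer=EXTERNAL_TRAKT)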
- - show: a Series object that we should get the names of - Returns: all possible show names - """ - - show_names = get_scene_exceptions(show.indexerid, show.indexer, season) - show_names.add(show.name) - - new_show_names = set() - - if not show.is_anime: - country_list = {} - # add the country list - country_list.update(common.countryList) - # add the reversed mapping of the country list - country_list.update({v: k for k, v in common.countryList.items()}) - - for name in show_names: - if not name: - continue - - # if we have "Show Name Australia" or "Show Name (Australia)" - # this will add "Show Name (AU)" for any countries defined in - # common.countryList (and vice versa) - for country in country_list: - pattern_1 = ' {0}'.format(country) - pattern_2 = ' ({0})'.format(country) - replacement = ' ({0})'.format(country_list[country]) - if name.endswith(pattern_1): - new_show_names.add(name.replace(pattern_1, replacement)) - elif name.endswith(pattern_2): - new_show_names.add(name.replace(pattern_2, replacement)) - - return show_names.union(new_show_names) - - def determineReleaseName(dir_name=None, nzb_name=None): """Determine a release name from an nzb and/or folder name.""" diff --git a/medusa/show_queue.py b/medusa/show_queue.py index f81d62a4a0..a51a297a60 100644 --- a/medusa/show_queue.py +++ b/medusa/show_queue.py @@ -21,26 +21,48 @@ from imdbpie.exceptions import HTTPError as IMDbHTTPError +from medusa import ( + app, + generic_queue, + logger, + name_cache, + notifiers, + scene_numbering, + ui, +) +from medusa.black_and_white_list import BlackAndWhiteList +from medusa.common import WANTED, statusStrings +from medusa.helper.common import episode_num, sanitize_filename +from medusa.helper.exceptions import ( + CantRefreshShowException, + CantRemoveShowException, + CantUpdateShowException, + EpisodeDeletedException, + MultipleShowObjectsException, + ShowDirectoryNotFoundException +) +from medusa.helpers import ( + chmod_as_parent, + delete_empty_folders, + get_showname_from_indexer, + make_dir, +) +from medusa.helpers.externals import check_existing_shows +from medusa.indexers.indexer_api import indexerApi +from medusa.indexers.indexer_exceptions import ( + IndexerAttributeNotFound, + IndexerError, + IndexerException, + IndexerShowAllreadyInLibrary, + IndexerShowIncomplete, + IndexerShowNotFoundInLanguage, +) +from medusa.tv import Series + from six import binary_type, text_type from traktor import TraktException -from . 
import app, generic_queue, logger, name_cache, notifiers, scene_numbering, ui -from .black_and_white_list import BlackAndWhiteList -from .common import WANTED, statusStrings -from .helper.common import episode_num, sanitize_filename -from .helper.exceptions import ( - CantRefreshShowException, CantRemoveShowException, CantUpdateShowException, - EpisodeDeletedException, MultipleShowObjectsException, ShowDirectoryNotFoundException, ex -) -from .helper.externals import check_existing_shows -from .helpers import chmod_as_parent, delete_empty_folders, get_showname_from_indexer, make_dir -from .indexers.indexer_api import indexerApi -from .indexers.indexer_exceptions import (IndexerAttributeNotFound, IndexerError, IndexerException, - IndexerShowAllreadyInLibrary, IndexerShowIncomplete, - IndexerShowNotFoundInLanguage) -from .tv import Series - class ShowQueueActions(object): @@ -344,14 +366,14 @@ def run(self): # make sure the Indexer IDs are valid try: - lINDEXER_API_PARMS = indexerApi(self.indexer).api_params.copy() + l_indexer_api_params = indexerApi(self.indexer).api_params.copy() if self.lang: - lINDEXER_API_PARMS['language'] = self.lang + l_indexer_api_params['language'] = self.lang - logger.log(u"" + str(indexerApi(self.indexer).name) + ": " + repr(lINDEXER_API_PARMS)) + logger.log(u"" + str(indexerApi(self.indexer).name) + ": " + repr(l_indexer_api_params)) - t = indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS) - s = t[self.indexer_id] + indexer_api = indexerApi(self.indexer).indexer(**l_indexer_api_params) + s = indexer_api[self.indexer_id] # Let's try to create the show Dir if it's not provided. This way we force the show dir # to build build using the Indexers provided series name @@ -416,13 +438,25 @@ def run(self): "Unable to add show", "Unable to look up the show in {0} on {1} using ID {2} " "Reason: {3}" - .format(self.showDir, indexerApi(self.indexer).name, self.indexer_id, e) + .format(self.showDir, indexerApi(self.indexer).name, self.indexer_id, e.message) ) self._finishEarly() return + except IndexerShowNotFoundInLanguage as e: + logger.log(u'{id}: Data retrieved from {indexer} was incomplete. The indexer does not provide ' + u'show information in the searched language {language}. Aborting: {error_msg}'.format + (id=self.indexer_id, indexer=indexerApi(self.indexer).name, + language=e.language, error_msg=e.message), logger.WARNING) + ui.notifications.error('Error adding show!', + 'Unable to add show {indexer_id} on {indexer} with this language: {language}'. + format(indexer_id=self.indexer_id, + indexer=indexerApi(self.indexer).name, + language=e.language)) + self._finishEarly() + return except Exception as e: logger.log(u"%s Error while loading information from indexer %s. " - u"Error: %r" % (self.indexer_id, indexerApi(self.indexer).name, ex(e)), logger.ERROR) + u"Error: %r" % (self.indexer_id, indexerApi(self.indexer).name, e.message), logger.ERROR) ui.notifications.error( "Unable to add show", "Unable to look up the show in {0} on {1} using ID {2}, not using the NFO. 
" @@ -434,7 +468,7 @@ def run(self): try: newShow = Series(self.indexer, self.indexer_id, self.lang) - newShow.load_from_indexer(t) + newShow.load_from_indexer(indexer_api) self.show = newShow @@ -470,7 +504,7 @@ def run(self): except IndexerException as e: logger.log( - u"Unable to add show due to an error with " + indexerApi(self.indexer).name + ": " + ex(e), + u"Unable to add show due to an error with " + indexerApi(self.indexer).name + ": " + e.message, logger.ERROR) if self.show: ui.notifications.error( @@ -489,7 +523,7 @@ def run(self): return except Exception as e: - logger.log(u"Error trying to add show: " + ex(e), logger.ERROR) + logger.log(u"Error trying to add show: " + e.message, logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) self._finishEarly() raise @@ -498,14 +532,14 @@ def run(self): try: self.show.load_imdb_info() except IMDbHTTPError as e: - logger.log(u"Something wrong on IMDb api: " + ex(e), logger.WARNING) + logger.log(u"Something wrong on IMDb api: " + e.message, logger.WARNING) except Exception as e: - logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR) + logger.log(u"Error loading IMDb info: " + e.message, logger.ERROR) try: self.show.save_to_db() except Exception as e: - logger.log(u"Error saving the show to the database: " + ex(e), logger.ERROR) + logger.log(u"Error saving the show to the database: " + e.message, logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) self._finishEarly() raise @@ -514,10 +548,10 @@ def run(self): app.showList.append(self.show) try: - self.show.load_episodes_from_indexer(tvapi=t) + self.show.load_episodes_from_indexer(tvapi=indexer_api) except Exception as e: logger.log( - u"Error with " + indexerApi(self.show.indexer).name + ", not creating episode list: " + ex(e), + u"Error with " + indexerApi(self.show.indexer).name + ", not creating episode list: " + e.message, logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) @@ -527,7 +561,7 @@ def run(self): try: self.show.load_episodes_from_dir() except Exception as e: - logger.log(u"Error searching dir for episodes: " + ex(e), logger.ERROR) + logger.log(u"Error searching dir for episodes: " + e.message, logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) # if they set default ep status to WANTED then run the backlog to search for episodes @@ -602,7 +636,7 @@ def run(self): scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer) except Exception as e: logger.log(u"{id}: Error while refreshing show {show}. Error: {error_msg}".format - (id=self.show.indexerid, show=self.show.name, error_msg=e), logger.ERROR) + (id=self.show.indexerid, show=self.show.name, error_msg=e.message), logger.ERROR) self.finish() @@ -689,18 +723,18 @@ def run(self): except IndexerError as e: logger.log(u'{id}: Unable to contact {indexer}. Aborting: {error_msg}'.format (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=ex(e)), logger.WARNING) + error_msg=e.message), logger.WARNING) return except IndexerAttributeNotFound as e: logger.log(u'{id}: Data retrieved from {indexer} was incomplete. Aborting: {error_msg}'.format (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=ex(e)), logger.WARNING) + error_msg=e.message), logger.WARNING) return except IndexerShowNotFoundInLanguage as e: logger.log(u'{id}: Data retrieved from {indexer} was incomplete. The indexer does not provide ' u'show information in the searched language {language}. 
Aborting: {error_msg}'.format - (language=e.language, id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, + language=e.language, error_msg=e.message), logger.WARNING) ui.notifications.error('Error changing language show!', 'Unable to change language for show {show_name} on {indexer} to language: {language}'. format(show_name=self.show.name, @@ -713,10 +747,10 @@ def run(self): self.show.load_imdb_info() except IMDbHTTPError as e: logger.log(u'{id}: Something wrong on IMDb api: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) except Exception as e: logger.log(u'{id}: Error loading IMDb info: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) # have to save show before reading episodes from db try: @@ -724,7 +758,7 @@ def run(self): self.show.save_to_db() except Exception as e: logger.log(u"{id}: Error saving new IMDb show info to database: {error_msg}".format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) logger.log(traceback.format_exc(), logger.ERROR) # get episode list from DB @@ -733,7 +767,7 @@ def run(self): except IndexerException as e: logger.log(u'{id}: Unable to contact {indexer}. Aborting: {error_msg}'.format (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=e), logger.WARNING) + error_msg=e.message), logger.WARNING) return # get episode list from the indexer @@ -742,7 +776,7 @@ def run(self): except IndexerException as e: logger.log(u'{id}: Unable to get info from {indexer}. The show info will not be refreshed. ' u'Error: {error_msg}'.format - (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, error_msg=ex(e)), + (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, error_msg=e.message), logger.WARNING) episodes_from_indexer = None @@ -778,7 +812,7 @@ def run(self): self.show.save_to_db() except Exception as e: logger.log(u'{id}: Error saving all updated show info to database: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) logger.log(traceback.format_exc(), logger.ERROR) logger.log(u'{id}: Finished update of {show}'.format @@ -821,12 +855,12 @@ def run(self): except IndexerError as e: logger.log(u'{id}: Unable to contact {indexer}. Aborting: {error_msg}'.format (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=ex(e)), logger.WARNING) + error_msg=e.message), logger.WARNING) return except IndexerAttributeNotFound as e: logger.log(u'{id}: Data retrieved from {indexer} was incomplete. 
Aborting: {error_msg}'.format (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=ex(e)), logger.WARNING) + error_msg=e.message), logger.WARNING) return logger.log(u'{id}: Retrieving show info from IMDb'.format(id=self.show.indexerid), logger.DEBUG) @@ -834,10 +868,10 @@ def run(self): self.show.load_imdb_info() except IMDbHTTPError as e: logger.log(u'{id}: Something wrong on IMDb api: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) except Exception as e: logger.log(u'{id}: Error loading IMDb info: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) # have to save show before reading episodes from db try: @@ -845,7 +879,7 @@ def run(self): self.show.save_to_db() except Exception as e: logger.log(u"{id}: Error saving new IMDb show info to database: {error_msg}".format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) logger.log(traceback.format_exc(), logger.ERROR) # get episode list from DB @@ -854,7 +888,7 @@ def run(self): except IndexerException as e: logger.log(u'{id}: Unable to contact {indexer}. Aborting: {error_msg}'.format (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, - error_msg=e), logger.WARNING) + error_msg=e.message), logger.WARNING) return # get episode list from the indexer @@ -863,7 +897,7 @@ def run(self): except IndexerException as e: logger.log(u'{id}: Unable to get info from {indexer}. The show info will not be refreshed. ' u'Error: {error_msg}'.format - (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, error_msg=ex(e)), + (id=self.show.indexerid, indexer=indexerApi(self.show.indexer).name, error_msg=e.message), logger.WARNING) episodes_from_indexer = None @@ -899,7 +933,7 @@ def run(self): self.show.save_to_db() except Exception as e: logger.log(u'{id}: Error saving all updated show info to database: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, error_msg=e.message), logger.WARNING) logger.log(traceback.format_exc(), logger.ERROR) logger.log(u'{id}: Finished update of {show}'.format @@ -930,7 +964,7 @@ def run(self): except TraktException as e: logger.log(u'{id}: Unable to delete show {show} from Trakt. ' u'Please remove manually otherwise it will be added again. Error: {error_msg}'.format - (id=self.show.indexerid, show=self.show.name, error_msg=ex(e)), logger.WARNING) + (id=self.show.indexerid, show=self.show.name, error_msg=e.message), logger.WARNING) self.show.delete_show(full=self.full) diff --git a/medusa/show_updater.py b/medusa/show_updater.py index 2e4069f392..8e955dcb71 100644 --- a/medusa/show_updater.py +++ b/medusa/show_updater.py @@ -64,7 +64,7 @@ def run(self, force=False): indexer_api_params = indexerApi(show.indexer).api_params.copy() try: - t = indexerApi(show.indexer).indexer(**indexer_api_params) + indexer_api = indexerApi(show.indexer).indexer(**indexer_api_params) except IndexerUnavailable: logger.warning(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' u'connectivity issues. 
While trying to look for show updates on show: {show}', @@ -82,8 +82,9 @@ def run(self, force=False): if last_update and last_update > time.time() - (604800 * update_max_weeks): if show.indexer not in indexer_updated_shows: try: - indexer_updated_shows[show.indexer] = t.get_last_updated_series(last_update, - update_max_weeks) + indexer_updated_shows[show.indexer] = indexer_api.get_last_updated_series( + last_update, update_max_weeks + ) except IndexerUnavailable: logger.warning(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' u'connectivity issues while trying to look for show updates on show: {show}', @@ -92,12 +93,11 @@ def run(self, force=False): except IndexerException as e: logger.warning(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' u'issues while trying to get updates for show {show}. Cause: {cause}', - indexer_name=indexerApi(show.indexer).name, show=show.name, cause=e) + indexer_name=indexerApi(show.indexer).name, show=show.name, cause=e.message) continue except Exception as e: logger.exception(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' - u'issues while trying to get updates for show {show}. Cause: {cause}. ' - u'Traceback: {trace}', + u'issues while trying to get updates for show {show}. Cause: {cause}.', indexer_name=indexerApi(show.indexer).name, show=show.name, cause=e) continue @@ -112,7 +112,7 @@ def run(self, force=False): continue # These are the criteria for performing a full show refresh. - if any([not hasattr(t, 'get_last_updated_seasons'), + if any([not hasattr(indexer_api, 'get_last_updated_seasons'), not last_update, last_update < time.time() - 604800 * update_max_weeks]): # no entry in lastUpdate, or last update was too long ago, @@ -124,10 +124,10 @@ def run(self, force=False): refresh_shows.append(show) # Else fall back to per season updates. - elif hasattr(t, 'get_last_updated_seasons'): + elif hasattr(indexer_api, 'get_last_updated_seasons'): # Get updated seasons and add them to the season update list. try: - updated_seasons = t.get_last_updated_seasons([show.indexerid], last_update, update_max_weeks) + updated_seasons = indexer_api.get_last_updated_seasons([show.indexerid], last_update, update_max_weeks) except IndexerUnavailable: logger.warning(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' u'connectivity issues while trying to look for showupdates on show: {show}', @@ -136,7 +136,7 @@ def run(self, force=False): except IndexerException as e: logger.warning(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' u'issues while trying to get updates for show {show}. 
Cause: {cause}', - indexer_name=indexerApi(show.indexer).name, show=show.name, cause=e) + indexer_name=indexerApi(show.indexer).name, show=show.name, cause=e.message) continue except Exception as e: logger.exception(u'Problem running show_updater, Indexer {indexer_name} seems to be having ' diff --git a/medusa/subtitles.py b/medusa/subtitles.py index e8a019cfdd..25c9c43b93 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -25,7 +25,6 @@ import re import subprocess import time -import traceback from babelfish import Language, language_converters from dogpile.cache.api import NO_VALUE @@ -399,54 +398,42 @@ def download_subtitles(tv_episode, video_path=None, subtitles=True, embedded_sub logger.debug(u'Episode already has all needed subtitles, skipping %s %s', show_name, ep_num) return [] - try: - logger.debug(u'Checking subtitle candidates for %s %s (%s)', show_name, ep_num, os.path.basename(video_path)) - video = get_video(tv_episode, video_path, subtitles_dir=subtitles_dir, subtitles=subtitles, - embedded_subtitles=embedded_subtitles, release_name=release_name) - if not video: - logger.info(u'Exception caught in subliminal.scan_video for %s', video_path) - return [] - - if app.SUBTITLES_PRE_SCRIPTS: - run_subs_pre_scripts(video_path) - - pool = get_provider_pool() - subtitles_list = pool.list_subtitles(video, languages) - for provider in pool.providers: - if provider in pool.discarded_providers: - logger.debug(u'Could not search in %s provider. Discarding for now', provider) - - if not subtitles_list: - logger.info(u'No subtitles found for %s', os.path.basename(video_path)) - return [] - - min_score = get_min_score() - scored_subtitles = score_subtitles(subtitles_list, video) - for subtitle, score in scored_subtitles: - logger.debug(u'[{0:>13s}:{1:<5s}] score = {2:3d}/{3:3d} for {4}'.format( - subtitle.provider_name, subtitle.language, score, min_score, get_subtitle_description(subtitle))) - - found_subtitles = pool.download_best_subtitles(subtitles_list, video, languages=languages, - hearing_impaired=app.SUBTITLES_HEARING_IMPAIRED, - min_score=min_score, only_one=not app.SUBTITLES_MULTI) - - if not found_subtitles: - logger.info(u'No subtitles found for %s with a minimum score of %d', - os.path.basename(video_path), min_score) - return [] - - return save_subs(tv_episode, video, found_subtitles, video_path=video_path) - except IOError as error: - if 'No space left on device' in ex(error): - logger.warning(u'Not enough space on the drive to save subtitles') - else: - logger.warning(traceback.format_exc()) - except Exception as error: - logger.debug(u'Exception: %s', error) - logger.info(u'Error occurred when downloading subtitles for: %s', video_path) - logger.error(traceback.format_exc()) + logger.debug(u'Checking subtitle candidates for %s %s (%s)', show_name, ep_num, os.path.basename(video_path)) + video = get_video(tv_episode, video_path, subtitles_dir=subtitles_dir, subtitles=subtitles, + embedded_subtitles=embedded_subtitles, release_name=release_name) + if not video: + logger.info(u'Exception caught in subliminal.scan_video for %s', video_path) + return [] - return [] + if app.SUBTITLES_PRE_SCRIPTS: + run_subs_pre_scripts(video_path) + + pool = get_provider_pool() + subtitles_list = pool.list_subtitles(video, languages) + for provider in pool.providers: + if provider in pool.discarded_providers: + logger.debug(u'Could not search in %s provider. 
Discarding for now', provider) + + if not subtitles_list: + logger.info(u'No subtitles found for %s', os.path.basename(video_path)) + return [] + + min_score = get_min_score() + scored_subtitles = score_subtitles(subtitles_list, video) + for subtitle, score in scored_subtitles: + logger.debug(u'[{0:>13s}:{1:<5s}] score = {2:3d}/{3:3d} for {4}'.format( + subtitle.provider_name, subtitle.language, score, min_score, get_subtitle_description(subtitle))) + + found_subtitles = pool.download_best_subtitles(subtitles_list, video, languages=languages, + hearing_impaired=app.SUBTITLES_HEARING_IMPAIRED, + min_score=min_score, only_one=not app.SUBTITLES_MULTI) + + if not found_subtitles: + logger.info(u'No subtitles found for %s with a minimum score of %d', + os.path.basename(video_path), min_score) + return [] + + return save_subs(tv_episode, video, found_subtitles, video_path=video_path) def save_subs(tv_episode, video, found_subtitles, video_path=None): @@ -704,13 +691,16 @@ def get_video(tv_episode, video_path, subtitles_dir=None, subtitles=True, embedd logger.debug(u'Found cached video information under key %s', key) return cached_payload['video'] - try: - video_path = _encode(video_path) - subtitles_dir = _encode(subtitles_dir or get_subtitles_dir(video_path)) + video_path = _encode(video_path) + subtitles_dir = _encode(subtitles_dir or get_subtitles_dir(video_path)) - logger.debug(u'Scanning video %s...', video_path) - video = scan_video(video_path) + logger.debug(u'Scanning video %s...', video_path) + try: + video = scan_video(video_path) + except ValueError as e: + logger.warning(u'Unable to scan video: %s. Error: %s', video_path, e.message) + else: # external subtitles if subtitles: video.subtitle_languages |= set(search_external_subtitles(video_path, directory=subtitles_dir).values()) @@ -726,8 +716,6 @@ def get_video(tv_episode, video_path, subtitles_dir=None, subtitles=True, embedd logger.debug(u'Video information cached under key %s', key) return video - except Exception as error: - logger.info(u'Exception: %s', error) def get_subtitles_dir(video_path): @@ -780,15 +768,16 @@ def delete_unwanted_subtitles(dirpath, filename): return code = filename.rsplit('.', 2)[1].lower().replace('_', '-') - language = from_code(code, unknown='') or from_ietf_code(code, unknown='und') + language = from_code(code, unknown='') or from_ietf_code(code) if language.opensubtitles not in app.SUBTITLES_LANGUAGES: try: os.remove(os.path.join(dirpath, filename)) + except OSError as error: + logger.info(u"Couldn't delete subtitle: %s. Error: %s", filename, ex(error)) + else: logger.debug(u"Deleted '%s' because we don't want subtitle language '%s'. We only want '%s' language(s)", filename, language, ','.join(app.SUBTITLES_LANGUAGES)) - except Exception as error: - logger.info(u"Couldn't delete subtitle: %s. 
Error: %s", filename, ex(error)) class SubtitlesFinder(object): @@ -869,7 +858,7 @@ def subtitles_download_in_pp(): # pylint: disable=too-many-locals, too-many-bra if run_post_process: logger.info(u'Starting post-process with default settings now that we found subtitles') - process_tv.processDir(app.TV_DOWNLOAD_DIR) + process_tv.ProcessResult(app.TV_DOWNLOAD_DIR, app.PROCESS_METHOD).process() @staticmethod def unpack_rar_files(dirpath): @@ -885,8 +874,7 @@ def unpack_rar_files(dirpath): video_files = [video_file for video_file in files if is_media_file(video_file)] if u'_UNPACK' not in root and (not video_files or root == app.TV_DOWNLOAD_DIR): logger.debug(u'Found rar files in post-process folder: %s', rar_files) - result = process_tv.ProcessResult() - process_tv.unRAR(root, rar_files, False, result) + process_tv.ProcessResult(app.TV_DOWNLOAD_DIR).unrar(root, rar_files, False) elif rar_files and not app.UNPACK: logger.warning(u'Unpack is disabled. Skipping: %s', rar_files) @@ -897,11 +885,11 @@ def run(self, force=False): # pylint: disable=too-many-branches, too-many-state :type force: bool """ if self.amActive: - logger.log(u"Subtitle finder is still running, not starting it again", logger.DEBUG) + logger.debug(u'Subtitle finder is still running, not starting it again') return if not app.USE_SUBTITLES: - logger.log(u"Subtitle search is disabled. Please enabled it", logger.WARNING) + logger.warning(u'Subtitle search is disabled. Please enabled it') return if not enabled_service_list(): @@ -912,17 +900,18 @@ def run(self, force=False): # pylint: disable=too-many-branches, too-many-state self.amActive = True def dhm(td): - days = td.days - hours = td.seconds // 60 ** 2 - minutes = (td.seconds // 60) % 60 - ret = (u'', '{} days, '.format(days))[days > 0] + \ - (u'', '{} hours, '.format(hours))[hours > 0] + \ - (u'', '{} minutes'.format(minutes))[minutes > 0] - if days == 1: + """Create the string for subtitles delay.""" + days_delay = td.days + hours_delay = td.seconds // 60 ** 2 + minutes_delay = (td.seconds // 60) % 60 + ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \ + (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \ + (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0] + if days_delay == 1: ret = ret.replace('days', 'day') - if hours == 1: + if hours_delay == 1: ret = ret.replace('hours', 'hour') - if minutes == 1: + if minutes_delay == 1: ret = ret.replace('minutes', 'minute') return ret.rstrip(', ') @@ -1000,45 +989,34 @@ def dhm(td): except ValueError: lastsearched = datetime.datetime.min - try: - if not force: - now = datetime.datetime.now() - days = int(ep_to_sub['age']) - delay_time = datetime.timedelta(hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24) - delay = lastsearched + delay_time - now - - # Search every hour until 10 days pass - # After 10 days, search every 8 hours, after 30 days search once a month - # Will always try an episode regardless of age for 3 times - # The time resolution is minute - # Only delay is the it's bigger than one minute and avoid wrongly skipping the search slot. 
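The comment block above spells out the retry schedule; here is a small sketch of the same calculation, using a hypothetical helper name (next_search_delay) purely for illustration:

    import datetime

    def next_search_delay(age_days, last_searched, now=None):
        """Hourly for the first 10 days, every 8 hours up to 30 days, then roughly monthly."""
        now = now or datetime.datetime.now()
        wait = datetime.timedelta(hours=1 if age_days <= 10 else 8 if age_days <= 30 else 30 * 24)
        return last_searched + wait - now

    # An episode that aired 12 days ago and was last searched 3 hours ago still has
    # about 5 hours to wait, so its slot is skipped, unless it has been tried fewer
    # than 3 times already (the searchcount check in the code below).
    remaining = next_search_delay(12, datetime.datetime.now() - datetime.timedelta(hours=3))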
- if delay.total_seconds() > 60 and int(ep_to_sub['searchcount']) > 2: - logger.debug(u'Subtitle search for %s %s delayed for %s', - ep_to_sub['show_name'], ep_num, dhm(delay)) - continue - - show_object = Show.find(app.showList, int(ep_to_sub['showid'])) - if not show_object: - logger.debug(u'Show with ID %s not found in the database', ep_to_sub['showid']) - continue - - episode_object = show_object.get_episode(ep_to_sub['season'], ep_to_sub['episode']) - if isinstance(episode_object, str): - logger.debug(u'%s %s not found in the database', ep_to_sub['show_name'], ep_num) + if not force: + now = datetime.datetime.now() + days = int(ep_to_sub['age']) + delay_time = datetime.timedelta(hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24) + delay = lastsearched + delay_time - now + + # Search every hour until 10 days pass + # After 10 days, search every 8 hours, after 30 days search once a month + # Will always try an episode regardless of age for 3 times + # The time resolution is minute + # Only delay is the it's bigger than one minute and avoid wrongly skipping the search slot. + if delay.total_seconds() > 60 and int(ep_to_sub['searchcount']) > 2: + logger.debug(u'Subtitle search for %s %s delayed for %s', + ep_to_sub['show_name'], ep_num, dhm(delay)) continue - try: - episode_object.download_subtitles() - except Exception as error: - logger.error(u'Unable to find subtitles for %s %s. Error: %s', - ep_to_sub['show_name'], ep_num, ex(error)) - continue + show_object = Show.find(app.showList, int(ep_to_sub['showid'])) + if not show_object: + logger.debug(u'Show with ID %s not found in the database', ep_to_sub['showid']) + continue - except Exception as error: - logger.warning(u'Error while searching subtitles for %s %s. Error: %s', - ep_to_sub['show_name'], ep_num, ex(error)) + episode_object = show_object.get_episode(ep_to_sub['season'], ep_to_sub['episode']) + if isinstance(episode_object, str): + logger.debug(u'%s %s not found in the database', ep_to_sub['show_name'], ep_num) continue + episode_object.download_subtitles() + logger.info(u'Finished checking for missed subtitles') self.amActive = False diff --git a/medusa/system/restart.py b/medusa/system/restart.py index ca0a6fa626..01a9a2f37e 100644 --- a/medusa/system/restart.py +++ b/medusa/system/restart.py @@ -1,23 +1,7 @@ # coding=utf-8 -# This file is part of Medusa. -# -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - -from .. import app -from ..event_queue import Events +from medusa import app +from medusa.event_queue import Events class Restart(object): diff --git a/medusa/system/shutdown.py b/medusa/system/shutdown.py index 634b53d126..073bea2e9b 100644 --- a/medusa/system/shutdown.py +++ b/medusa/system/shutdown.py @@ -1,23 +1,7 @@ # coding=utf-8 -# This file is part of Medusa. 
-# -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . - -from .. import app -from ..event_queue import Events +from medusa import app +from medusa.event_queue import Events class Shutdown(object): diff --git a/medusa/tagger/episode.py b/medusa/tagger/episode.py index 339c797880..a0fa27771f 100644 --- a/medusa/tagger/episode.py +++ b/medusa/tagger/episode.py @@ -6,8 +6,8 @@ import re -from ..helper.common import try_int -from ..recompiled import tags +from medusa.helper.common import try_int +from medusa.recompiled import tags class EpisodeTags(object): diff --git a/medusa/torrent_checker.py b/medusa/torrent_checker.py index 28acec7fcb..3261d64fda 100644 --- a/medusa/torrent_checker.py +++ b/medusa/torrent_checker.py @@ -20,8 +20,7 @@ import logging import app - -from . import clients +from medusa.clients import torrent logger = logging.getLogger(__name__) @@ -41,7 +40,7 @@ def run(self, force=False): self.amActive = True try: - client = clients.get_client_class(app.TORRENT_METHOD)() + client = torrent.get_client_class(app.TORRENT_METHOD)() client.remove_ratio_reached() except Exception as e: logger.debug('Failed to check torrent status. Error: {error}', error=e) diff --git a/medusa/trakt_checker.py b/medusa/trakt_checker.py index cb978774a3..735a739996 100644 --- a/medusa/trakt_checker.py +++ b/medusa/trakt_checker.py @@ -15,34 +15,32 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . - +"""Trakt checker module.""" from __future__ import unicode_literals import datetime -import traceback -from traktor import TokenExpiredException, TraktApi, TraktException +from traktor import AuthException, TokenExpiredException, TraktApi, TraktException + from . 
import app, db, logger, ui -from .common import Quality, SKIPPED, UNKNOWN, WANTED +from .common import Quality, SKIPPED, WANTED from .helper.common import episode_num -from .helper.exceptions import ex -from .indexers.indexer_api import indexerApi +from .helpers import get_title_without_year +from .indexers.indexer_config import EXTERNAL_IMDB, EXTERNAL_TRAKT, get_trakt_indexer, indexerConfig from .search.queue import BacklogQueueItem from .show.show import Show -def setEpisodeToWanted(show, s, e): - """ - Sets an episode to wanted, only if it is currently skipped - """ - ep_obj = show.get_episode(s, e) +def set_episode_to_wanted(show, s, e): + """Set an episode to wanted, only if it is currently skipped.""" + ep_obj = show.get_episode(s, e, no_create=True) if ep_obj: with ep_obj.lock: if ep_obj.status != SKIPPED or ep_obj.airdate == datetime.date.fromordinal(1): return - logger.log('Setting episode {show} {ep} to wanted'.format + logger.log("Setting episode '{show}' {ep} to wanted".format (show=show.name, ep=episode_num(s, e))) # figure out what segment the episode is in and remember it so we can backlog it @@ -52,12 +50,15 @@ def setEpisodeToWanted(show, s, e): cur_backlog_queue_item = BacklogQueueItem(show, [ep_obj]) app.search_queue_scheduler.action.add_item(cur_backlog_queue_item) - logger.log('Starting backlog search for {show} {ep} because some episodes were set to wanted'.format + logger.log("Starting backlog search for '{show}' {ep} because some episodes were set to wanted".format (show=show.name, ep=episode_num(s, e))) class TraktChecker(object): + """Trakt checker class.""" + def __init__(self): + """Initialize the class.""" trakt_settings = {'trakt_api_key': app.TRAKT_API_KEY, 'trakt_api_secret': app.TRAKT_API_SECRET, 'trakt_access_token': app.TRAKT_ACCESS_TOKEN, @@ -69,7 +70,8 @@ def __init__(self): self.collection_list = {} self.amActive = False - def run(self, force=False): # pylint: disable=unused-argument + def run(self, force=False): + """Run Trakt Checker.""" self.amActive = True # add shows from Trakt watchlist @@ -78,20 +80,12 @@ def run(self, force=False): # pylint: disable=unused-argument if len(app.ROOT_DIRS.split('|')) < 2: logger.log('No default root directory', logger.WARNING) ui.notifications.error('Unable to add show', - 'You do not have any default root directory configured. ' + 'You do not have any default root directory. 
' 'Please configure in general settings!') return - try: - self.sync_watchlist() - except Exception: - logger.log(traceback.format_exc(), logger.DEBUG) - - try: - # sync Trakt library with medusa library - self.sync_library() - except Exception: - logger.log(traceback.format_exc(), logger.DEBUG) + self.sync_watchlist() + self.sync_library() self.amActive = False @@ -104,390 +98,475 @@ def _request(self, path, data=None, method='GET'): app.TRAKT_REFRESH_TOKEN = self.trakt_api.refresh_token app.instance.save_config() except TokenExpiredException: + logger.log(u'You need to get a PIN and authorize Medusa app', logger.WARNING) app.TRAKT_ACCESS_TOKEN = '' - raise + app.TRAKT_REFRESH_TOKEN = '' + app.instance.save_config() + raise TokenExpiredException('You need to get a PIN and authorize Medusa app') return library_shows - def find_show(self, indexerid): - + def find_show(self, indexerid, indexer): + """Find show in Trakt library.""" + trakt_library = [] try: - trakt_library = self._request('sync/collection/shows') or [] - if not trakt_library: - logger.log('No shows found in your library, aborting library update', logger.DEBUG) - return + trakt_library = self._request('sync/collection/shows') + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to retrieve shows from Trakt collection. Error: {error}".format + (error=e.message), logger.INFO) - trakt_show = [x for x in trakt_library if int(indexerid) in [int(x['show']['ids']['tvdb'] or 0), int(x['show']['ids']['tvrage'] or 0)]] - except TraktException as e: - logger.log('Could not connect to Trakt. Aborting library check. Error: {0}'.format(repr(e)), logger.WARNING) + if not trakt_library: + logger.log('No shows found in your Trakt library. Nothing to sync', logger.INFO) + return + trakt_show = [x for x in trakt_library if + int(indexerid) in [int(x['show']['ids'].get(get_trakt_indexer(indexer)))]] return trakt_show if trakt_show else None def remove_show_trakt_library(self, show_obj): - """Remove Show from trakt collections.""" - if self.find_show(show_obj.indexerid): - trakt_id = indexerApi(show_obj.indexer).config['trakt_id'] + """Remove show from trakt library.""" + if self.find_show(show_obj.indexerid, show_obj.indexer): + + # Check if TRAKT supports that indexer + if not get_trakt_indexer(show_obj.indexer): + return # URL parameters + title = get_title_without_year(show_obj.name, show_obj.start_year) data = { 'shows': [ { - 'title': show_obj.name, + 'title': title, 'year': show_obj.start_year, 'ids': {} } ] } - if trakt_id == 'tvdb_id': - data['shows'][0]['ids']['tvdb'] = show_obj.indexerid - else: - data['shows'][0]['ids']['tvrage'] = show_obj.indexerid + data['shows'][0]['ids'][get_trakt_indexer(show_obj.indexer)] = show_obj.indexerid - logger.log('Removing {0} from Trakt library'.format(show_obj.name), logger.DEBUG) + logger.log("Removing '{show}' from Trakt library".format(show=show_obj.name), logger.INFO) # Remove all episodes from the Trakt collection for this show try: self.remove_episode_trakt_collection(filter_show=show_obj) - except TraktException as e: - logger.log('Could not connect to Trakt. Aborting removing episodes for show {0} from Trakt library. Error: {1}'. - format(show_obj.name, repr(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to remove all episodes from show '{show}' from Trakt library. 
Error: {error}".format + (show=show_obj.name, error=e.message), logger.INFO) try: self._request('sync/collection/remove', data, method='POST') - except TraktException as e: - logger.log('Could not connect to Trakt. Aborting removing show {0} from Trakt library. Error: {1}'. - format(show_obj.name, repr(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to remove show '{show}' from Trakt library. Error: {error}".format + (show=show_obj.name, error=e.message), logger.INFO) def add_show_trakt_library(self, show_obj): - """ - Sends a request to trakt indicating that the given show and all its episodes is part of our library. - - show_obj: The Series object to add to trakt - """ + """Add show to trakt library.""" data = {} - if not self.find_show(show_obj.indexerid): - trakt_id = indexerApi(show_obj.indexer).config['trakt_id'] + if not self.find_show(show_obj.indexerid, show_obj.indexer): + + # Check if TRAKT supports that indexer + if not get_trakt_indexer(show_obj.indexer): + return + # URL parameters + title = get_title_without_year(show_obj.name, show_obj.start_year) data = { 'shows': [ { - 'title': show_obj.name, + 'title': title, 'year': show_obj.start_year, 'ids': {} } ] } - if trakt_id == 'tvdb_id': - data['shows'][0]['ids']['tvdb'] = show_obj.indexerid - else: - data['shows'][0]['ids']['tvrage'] = show_obj.indexerid + data['shows'][0]['ids'][get_trakt_indexer(show_obj.indexer)] = show_obj.indexerid if data: - logger.log('Adding {0} to Trakt library'.format(show_obj.name), logger.DEBUG) + logger.log("Adding show '{show}' to Trakt library".format(show=show_obj.name), logger.INFO) try: self._request('sync/collection', data, method='POST') - except TraktException as e: - logger.log('Could not connect to Trakt. Aborting adding show {0} to Trakt library. Error: {1}'.format(show_obj.name, repr(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to add show '{show}' to Trakt library. Error: {error}".format + (show=show_obj.name, error=e.message), logger.INFO) return def sync_library(self): + """Sync Trakt library.""" if app.TRAKT_SYNC and app.USE_TRAKT: - logger.log('Starting to sync Medusa with Trakt collection', logger.DEBUG) + logger.log('Syncing Trakt collection', logger.DEBUG) if self._get_show_collection(): self.add_episode_trakt_collection() if app.TRAKT_SYNC_REMOVE: self.remove_episode_trakt_collection() + logger.log(u"Synced Trakt collection", logger.DEBUG) def remove_episode_trakt_collection(self, filter_show=None): + """Remove episode from trakt collection. + + For episodes that no longer have a media file (location) + :param filter_show: optional. Only remove episodes from trakt collection for given shows + """ if app.TRAKT_SYNC_REMOVE and app.TRAKT_SYNC and app.USE_TRAKT: params = [] main_db_con = db.DBConnection() - sql_selection = b'select tv_shows.indexer, tv_shows.startyear, showid, show_name, season, episode, tv_episodes.status,' \ - b'tv_episodes.location from tv_episodes, tv_shows where tv_shows.indexer_id = tv_episodes.showid' + selection_status = ['?' for _ in Quality.DOWNLOADED + Quality.ARCHIVED] + sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name,' \ + b'e.season, e.episode, e.status ' \ + b'FROM tv_episodes AS e, tv_shows AS s WHERE s.indexer_id = e.showid and e.location = "" ' \ + b'AND e.status in ({0})'.format(','.join(selection_status)) if filter_show: - sql_selection += b' AND tv_shows.indexer_id = ? 
AND tv_shows.indexer = ?' + sql_selection += b' AND s.indexer_id = ? AND e.indexer = ?' params = [filter_show.indexerid, filter_show.indexer] - episodes = main_db_con.select(sql_selection, params) + sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED + params) + episodes = [dict(e) for e in sql_result] if episodes: trakt_data = [] for cur_episode in episodes: - trakt_id = indexerApi(cur_episode[b'indexer']).config['trakt_id'] - - if self._check_list(trakt_id, cur_episode[b'showid'], cur_episode[b'season'], cur_episode[b'episode'], - List='Collection'): - - if cur_episode[b'location'] == '': - logger.log('Removing Episode {show} {ep} from collection'.format - (show=cur_episode[b'show_name'], - ep=episode_num(cur_episode[b'season'], cur_episode[b'episode'])), - logger.DEBUG) - trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], cur_episode[b'show_name'], - cur_episode[b'startyear'], cur_episode[b'season'], cur_episode[b'episode'])) + # Check if TRAKT supports that indexer + if not get_trakt_indexer(cur_episode[b'indexer']): + continue + if self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'indexer_id'], + season=cur_episode[b'season'], episode=cur_episode[b'episode'], + list_type='Collection'): + logger.log("Removing episode '{show}' {ep} from Trakt collection".format + (show=cur_episode[b'show_name'], + ep=episode_num(cur_episode[b'season'], cur_episode[b'episode'])), logger.INFO) + title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear']) + trakt_data.append((cur_episode[b'indexer_id'], cur_episode[b'indexer'], + title, cur_episode[b'startyear'], + cur_episode[b'season'], cur_episode[b'episode'])) if trakt_data: try: data = self.trakt_bulk_data_generate(trakt_data) self._request('sync/collection/remove', data, method='POST') self._get_show_collection() - except TraktException as e: - logger.log('Could not connect to Trakt. Error: {0}'.format(ex(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to remove episodes from Trakt collection. Error: {error}".format + (error=e.message), logger.INFO) def add_episode_trakt_collection(self): - """Add all episodes from local library to Trakt collections. Enabled in app.TRAKT_SYNC_WATCHLIST setting.""" + """Add all existing episodes to Trakt collections. + + For episodes that have a media file (location) + """ if app.TRAKT_SYNC and app.USE_TRAKT: main_db_con = db.DBConnection() selection_status = ['?' 
for _ in Quality.DOWNLOADED + Quality.ARCHIVED] - sql_selection = b'select tv_shows.indexer, tv_shows.startyear, showid, show_name, season, ' \ - b'episode from tv_episodes,tv_shows where tv_shows.indexer_id = tv_episodes.showid ' \ - b"and tv_episodes.status in ({0}) and tv_episodes.location <> ''".format(','.join(selection_status)) - episodes = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED) + sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \ + b'FROM tv_episodes AS e, tv_shows AS s WHERE s.indexer_id = e.showid ' \ + b"AND e.status in ({0}) AND e.location <> ''".format(','.join(selection_status)) + + sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED) + episodes = [dict(e) for e in sql_result] if episodes: trakt_data = [] for cur_episode in episodes: - trakt_id = indexerApi(cur_episode[b'indexer']).config['trakt_id'] - - if not self._check_list(trakt_id, cur_episode[b'showid'], cur_episode[b'season'], cur_episode[b'episode'], List='Collection'): - logger.log('Adding Episode {show} {ep} to collection'.format + # Check if TRAKT supports that indexer + if not get_trakt_indexer(cur_episode[b'indexer']): + continue + + if not self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'indexer_id'], + season=cur_episode[b'season'], episode=cur_episode[b'episode'], + list_type='Collection'): + logger.log("Adding episode '{show}' {ep} to Trakt collection".format (show=cur_episode[b'show_name'], ep=episode_num(cur_episode[b'season'], cur_episode[b'episode'])), - logger.DEBUG) - trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], cur_episode[b'show_name'], cur_episode[b'startyear'], cur_episode[b'season'], cur_episode[b'episode'])) + logger.INFO) + title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear']) + trakt_data.append((cur_episode[b'indexer_id'], cur_episode[b'indexer'], + title, cur_episode[b'startyear'], + cur_episode[b'season'], cur_episode[b'episode'])) if trakt_data: try: data = self.trakt_bulk_data_generate(trakt_data) self._request('sync/collection', data, method='POST') self._get_show_collection() - except TraktException as e: - logger.log('Could not connect to Trakt. Error: {0}'.format(ex(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to add episodes to Trakt collection. 
Error: {error}".format + (error=e.message), logger.INFO) def sync_watchlist(self): + """Sync Trakt watchlist.""" if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: - logger.log('Starting to sync Medusa with Trakt Watchlist', logger.DEBUG) + logger.log('Syncing Trakt Watchlist', logger.DEBUG) self.remove_from_library() if self._get_show_watchlist(): logger.log('Syncing shows with Trakt watchlist', logger.DEBUG) self.add_show_watchlist() - self.fetch_trakt_shows() + self.sync_trakt_shows() if self._get_episode_watchlist(): logger.log('Syncing episodes with Trakt watchlist', logger.DEBUG) self.remove_episode_watchlist() self.add_episode_watchlist() - self.fetch_trakt_episodes() + self.sync_trakt_episodes() - logger.log('Medusa is synced with Trakt watchlist', logger.DEBUG) + logger.log('Synced Trakt watchlist', logger.DEBUG) def remove_episode_watchlist(self): + """Remove episode from Trakt watchlist.""" if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - sql_selection = b'select tv_shows.indexer, tv_shows.startyear, showid, show_name, season, episode, ' \ - b'tv_episodes.status from tv_episodes,tv_shows where tv_shows.indexer_id = tv_episodes.showid' - episodes = main_db_con.select(sql_selection) + status = Quality.DOWNLOADED + Quality.ARCHIVED + selection_status = [b'?' for _ in status] + sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ + b'FROM tv_episodes AS e, tv_shows AS s ' \ + b'WHERE s.indexer_id = e.showid AND e.status in ({0})'.format(b','.join(selection_status)) + sql_result = main_db_con.select(sql_selection, status) + episodes = [dict(i) for i in sql_result] if episodes: trakt_data = [] for cur_episode in episodes: - trakt_id = indexerApi(cur_episode[b'indexer']).config['trakt_id'] - if self._check_list(trakt_id, cur_episode[b'showid'], cur_episode[b'season'], cur_episode[b'episode']): - if cur_episode[b'status'] not in Quality.SNATCHED + Quality.SNATCHED_PROPER + [UNKNOWN] + [WANTED]: - logger.log('Removing Episode {show} {ep} from watchlist'.format - (show=cur_episode[b'show_name'], - ep=episode_num(cur_episode[b'season'], cur_episode[b'episode'])), - logger.DEBUG) - trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], cur_episode[b'show_name'], cur_episode[b'startyear'], cur_episode[b'season'], cur_episode[b'episode'])) + # Check if TRAKT supports that indexer + if not get_trakt_indexer(cur_episode[b'indexer']): + continue + + if self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'showid'], + season=cur_episode[b'season'], episode=cur_episode[b'episode']): + logger.log("Removing episode '{show}' {ep} from Trakt watchlist".format + (show=cur_episode[b'show_name'], + ep=episode_num(cur_episode[b'season'], cur_episode[b'episode'])), logger.INFO) + title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear']) + trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], + title, cur_episode[b'startyear'], + cur_episode[b'season'], cur_episode[b'episode'])) if trakt_data: try: data = self.trakt_bulk_data_generate(trakt_data) self._request('sync/watchlist/remove', data, method='POST') self._get_episode_watchlist() - except TraktException as e: - logger.log('Could not connect to Trakt. Error: {0}'.format(ex(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to remove episodes from Trakt watchlist. 
Error: {error}".format + (error=e.message), logger.INFO) def add_episode_watchlist(self): + """Add episode to Tratk watchlist.""" if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - selection_status = [b'?' for _ in Quality.SNATCHED + Quality.SNATCHED_PROPER + [WANTED]] - sql_selection = b'select tv_shows.indexer, tv_shows.startyear, showid, show_name, season, episode from tv_episodes, ' \ - b'tv_shows where tv_shows.indexer_id = tv_episodes.showid and tv_episodes.status in ({0})'.format(b','.join(selection_status)) - episodes = main_db_con.select(sql_selection, Quality.SNATCHED + Quality.SNATCHED_PROPER + [WANTED]) + status = Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + [WANTED] + selection_status = [b'?' for _ in status] + sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ + b'FROM tv_episodes AS e, tv_shows AS s ' \ + b'WHERE s.indexer_id = e.showid AND s.paused = 0 ' \ + b'AND e.status in ({0})'.format(b','.join(selection_status)) + sql_result = main_db_con.select(sql_selection, status) + episodes = [dict(i) for i in sql_result] if episodes: trakt_data = [] for cur_episode in episodes: - trakt_id = indexerApi(cur_episode[b'indexer']).config['trakt_id'] + # Check if TRAKT supports that indexer + if not get_trakt_indexer(cur_episode[b'indexer']): + continue - if not self._check_list(trakt_id, cur_episode[b'showid'], cur_episode[b'season'], cur_episode[b'episode']): - logger.log('Adding Episode {show} {ep} to watchlist'.format + if not self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'showid'], + season=cur_episode[b'season'], episode=cur_episode[b'episode']): + logger.log("Adding episode '{show}' {ep} to Trakt watchlist".format (show=cur_episode[b'show_name'], ep=episode_num(cur_episode[b'season'], cur_episode[b'episode'])), - logger.DEBUG) - trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], cur_episode[b'show_name'], cur_episode[b'startyear'], cur_episode[b'season'], - cur_episode[b'episode'])) + logger.INFO) + title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear']) + trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], title, + cur_episode[b'startyear'], cur_episode[b'season'], cur_episode[b'episode'])) if trakt_data: try: data = self.trakt_bulk_data_generate(trakt_data) self._request('sync/watchlist', data, method='POST') self._get_episode_watchlist() - except TraktException as e: - logger.log('Could not connect to Trakt. Error: {0}'.format(ex(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log('Unable to add episode to Trakt watchlist. ' + 'Error: {error}'.format(error=e.message), logger.INFO) + logger.log("Unable to add episodes to Trakt watchlist. Error: {error}".format + (error=e.message), logger.INFO) def add_show_watchlist(self): - if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: - logger.log('Syncing shows to Trakt watchlist', logger.DEBUG) + """Add show to Trakt watchlist. 
+ It will add all shows from Medusa library + """ + if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: if app.showList: trakt_data = [] for show_obj in app.showList: - trakt_id = indexerApi(show_obj.indexer).config['trakt_id'] - - if not self._check_list(trakt_id, show_obj.indexerid, 0, 0, List='Show'): - logger.log('Adding Show {0} with ID: {1} to Trakt watchlist'.format(show_obj.name, show_obj.indexerid), logger.DEBUG) - show_el = {'title': show_obj.name, 'year': show_obj.start_year, 'ids': {}} - if trakt_id == 'tvdb_id': - show_el['ids']['tvdb'] = show_obj.indexerid - else: - show_el['ids']['tvrage'] = show_obj.indexerid + if not self._check_list(show_obj=show_obj, list_type='Show'): + logger.log("Adding show '{show}' to Trakt watchlist".format + (show=show_obj.name), logger.INFO) + title = get_title_without_year(show_obj.name, show_obj.start_year) + show_el = {'title': title, 'year': show_obj.start_year, 'ids': {}} trakt_data.append(show_el) if trakt_data: try: data = {'shows': trakt_data} self._request('sync/watchlist', data, method='POST') - self._get_show_watchlist() - except TraktException as e: - logger.log('Could not connect to Trakt. Error: {0}'.format(ex(e)), logger.WARNING) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to add shows to Trakt watchlist. Error: {error}".format + (error=e.message), logger.INFO) + self._get_show_watchlist() def remove_from_library(self): + """Remove show from Medusa library if it is ended/completed.""" if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT and app.TRAKT_REMOVE_SHOW_FROM_APPLICATION: logger.log('Retrieving ended/completed shows to remove from Medusa', logger.DEBUG) if app.showList: for show in app.showList: if show.status == 'Ended': - if not show.imdb_id: - logger.log('Could not check trakt progress for {0} because the imdb id is missing from tvdb data, skipping'.format - (show.name), logger.WARNING) + trakt_id = show.externals.get('trakt_id', None) + if not (trakt_id or show.imdb_id): + logger.log("Unable to check Trakt progress for show '{show}' " + 'because Trakt|IMDB ID is missing. Skipping'.format(show=show.name), + logger.INFO) continue try: - progress = self._request('shows/{0}/progress/watched'.format(show.imdb_id)) or [] - except TraktException as e: - logger.log('Could not connect to Trakt. Aborting removing show {0} from Medusa. Error: {1}'.format(show.name, repr(e)), logger.WARNING) + progress = self._request('shows/{0}/progress/watched'.format(trakt_id or show.imdb_id)) + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log("Unable to check if show '{show}' is ended/completed. Error: {error}".format + (show=show.name, error=e.message), logger.INFO) continue + else: + if progress.get('aired', True) == progress.get('completed', False): + app.show_queue_scheduler.action.removeShow(show, full=True) + logger.log("Show '{show}' has been queued to be removed from Medusa library".format + (show=show.name), logger.INFO) - if not progress: - continue - - if progress.get('aired', True) == progress.get('completed', False): - app.show_queue_scheduler.action.removeShow(show, full=True) - logger.log('Show {0} has been removed from Medusa'.format(show.name), logger.DEBUG) - - def fetch_trakt_shows(self): - + def sync_trakt_shows(self): + """Sync Trakt shows watchlist.""" if not self.show_watchlist: - logger.log('No shows found in your watchlist, aborting watchlist update', logger.DEBUG) + logger.log('No shows found in your Trakt watchlist.
Nothing to sync', logger.INFO) else: - indexer = int(app.TRAKT_DEFAULT_INDEXER) - trakt_id = indexerApi(indexer).config['trakt_id'] - - for watchlisted_show in self.show_watchlist[trakt_id]: - indexer_id = int(watchlisted_show) - show_obj = self.show_watchlist[trakt_id][watchlisted_show] - if show_obj['year'] and show_obj['slug'].endswith(str(show_obj['year'])): - show_name = '{0} ({1})'.format(show_obj['title'], show_obj['year']) - else: - show_name = show_obj['title'] + trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER) + for watchlisted_show in self.show_watchlist: + trakt_show = watchlisted_show['show'] + + if trakt_show['year'] and trakt_show['ids']['slug'].endswith(str(trakt_show['year'])): + show_name = '{title} ({year})'.format(title=trakt_show['title'], year=trakt_show['year']) + else: + show_name = trakt_show['title'] + + show = None + for i in indexerConfig: + trakt_indexer = get_trakt_indexer(i) + indexer_id = trakt_show['ids'].get(trakt_indexer, -1) + indexer = indexerConfig[i]['id'] + show = Show.find(app.showList, indexer_id, indexer) + if show: + break + if not show: + # If can't find with available indexers try IMDB + trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB) + indexer_id = trakt_show['ids'].get(trakt_indexer, -1) + show = Show.find(app.showList, indexer_id, EXTERNAL_IMDB) + if not show: + # If can't find with available indexers try TRAKT + trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT) + indexer_id = trakt_show['ids'].get(trakt_indexer, -1) + show = Show.find(app.showList, indexer_id, EXTERNAL_TRAKT) + + if show: + continue + + indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1) if int(app.TRAKT_METHOD_ADD) != 2: - self.add_show(indexer, indexer_id, show_name, SKIPPED) + self.add_show(trakt_default_indexer, indexer_id, show_name, SKIPPED) else: - self.add_show(indexer, indexer_id, show_name, WANTED) + self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED) if int(app.TRAKT_METHOD_ADD) == 1: new_show = Show.find(app.showList, indexer_id) if new_show: - setEpisodeToWanted(new_show, 1, 1) + set_episode_to_wanted(new_show, 1, 1) else: self.todoWanted.append(indexer_id) + logger.log(u"Synced shows with Trakt watchlist", logger.DEBUG) - def fetch_trakt_episodes(self): - """ - Sets episodes to wanted that are in trakt watchlist - """ - logger.log(u"Retrieving episodes to sync with Trakt episode's watchlist", logger.DEBUG) - + def sync_trakt_episodes(self): + """Sync Trakt episodes watchlist.""" if not self.episode_watchlist: - logger.log('No episode found in your watchlist, aborting episode update', logger.DEBUG) + logger.log('No episodes found in your Trakt watchlist. 
Nothing to sync', logger.INFO) return - managed_show = [] - - indexer = int(app.TRAKT_DEFAULT_INDEXER) - trakt_id = indexerApi(indexer).config['trakt_id'] - - for watchlist_item in self.episode_watchlist[trakt_id]: - indexer_id = int(watchlist_item) - show = self.episode_watchlist[trakt_id][watchlist_item] - - new_show = Show.find(app.showList, indexer_id) - - try: - if not new_show: - if indexer_id not in managed_show: - self.add_show(indexer, indexer_id, show['title'], SKIPPED) - managed_show.append(indexer_id) - - for season_item in show['seasons']: - season = int(season_item) - - for episode_item in show['seasons'][season_item]['episodes']: - self.todoWanted.append((indexer_id, season, int(episode_item))) - else: - if new_show.indexer == indexer: - for season_item in show['seasons']: - season = int(season_item) - - for episode_item in show['seasons'][season_item]['episodes']: - setEpisodeToWanted(new_show, season, int(episode_item)) - except TypeError: - logger.log('Could not parse the output from trakt for {0} '.format(show['title']), logger.DEBUG) + added_shows = [] + trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER) + + for watchlist_item in self.episode_watchlist: + trakt_show = watchlist_item['show'] + trakt_episode = watchlist_item['episode'].get('number', -1) + trakt_season = watchlist_item['episode'].get('season', -1) + + show = None + for i in indexerConfig: + trakt_indexer = get_trakt_indexer(i) + indexer_id = trakt_show['ids'].get(trakt_indexer, -1) + indexer = indexerConfig[i]['id'] + show = Show.find(app.showList, indexer_id, indexer) + if show: + break + + if not show: + # If can't find with available indexers try IMDB + trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB) + indexer_id = trakt_show['ids'].get(trakt_indexer, -1) + show = Show.find(app.showList, indexer_id, EXTERNAL_IMDB) + if not show: + # If can't find with available indexers try TRAKT + trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT) + indexer_id = trakt_show['ids'].get(trakt_indexer, -1) + show = Show.find(app.showList, indexer_id, EXTERNAL_TRAKT) + + # If can't find show add with default trakt indexer + if not show: + indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1) + # Only add show if we didn't added it before + if indexer_id not in added_shows: + self.add_show(trakt_default_indexer, indexer_id, trakt_show['title'], SKIPPED) + added_shows.append(indexer_id) + if not trakt_season == 0 or not show.paused: + set_episode_to_wanted(show, trakt_season, trakt_episode) + + logger.log(u"Synced episodes with Trakt watchlist", logger.DEBUG) @staticmethod def add_show(indexer, indexer_id, show_name, status): - """ - Adds a new show with the default settings - """ + """Add a new show with default settings.""" if not Show.find(app.showList, int(indexer_id)): root_dirs = app.ROOT_DIRS.split('|') location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None if location: - logger.log('Adding show {0} with ID: {1}'.format(show_name, indexer_id)) + logger.log("Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}".format + (show=show_name, + indexer_name=indexerConfig[indexer]['identifier'], + id=indexer_id)) app.show_queue_scheduler.action.addShow(indexer, indexer_id, None, default_status=status, @@ -496,210 +575,116 @@ def add_show(indexer, indexer_id, show_name, status): paused=app.TRAKT_START_PAUSED, default_status_after=status, root_dir=location) else: - logger.log('There was an error creating the show, no root directory setting found', logger.WARNING) + 
logger.log("Error creating show '{show}' folder. No default root directory".format + (show=show_name), logger.WARNING) return def manage_new_show(self, show): - logger.log('Checking if trakt watchlist wants to search for episodes from new show {0}'.format(show.name), logger.DEBUG) + """Set episodes to wanted for the recently added show.""" + logger.log("Checking for wanted episodes for show '{show}' in Trakt watchlist".format + (show=show.name), logger.DEBUG) episodes = [i for i in self.todoWanted if i[0] == show.indexerid] for episode in episodes: self.todoWanted.remove(episode) - setEpisodeToWanted(show, episode[1], episode[2]) - - def _check_list(self, trakt_id, showid, season, episode, List=None): # pylint: disable=too-many-arguments - """ - Check in the Watchlist or collection list for Show - Is the Show, Season and Episode in the trakt_id list (tvdb / tvrage) - """ - - if 'Collection' == List: - try: - if self.collection_list[trakt_id][showid]['seasons'][season]['episodes'][episode] == episode: - return True - except KeyError: - return False - elif 'Show' == List: - try: - if self.show_watchlist[trakt_id][showid]['id'] == showid: + set_episode_to_wanted(show, episode[1], episode[2]) + + def _check_list(self, show_obj=None, indexer=None, indexer_id=None, season=None, episode=None, list_type=None): + """Check if we can find the show in the Trakt watchlist|collection list.""" + if 'Collection' == list_type: + trakt_indexer = get_trakt_indexer(indexer) + for collected_show in self.collection_list: + if not collected_show['show']['ids'].get(trakt_indexer, '') == indexer_id: + continue + if 'seasons' in collected_show: + for season_item in collected_show['seasons']: + for episode_item in season_item['episodes']: + trakt_season = season_item['number'] + trakt_episode = episode_item['number'] + if trakt_season == season and trakt_episode == episode: + return True + else: + return False + elif 'Show' == list_type: + trakt_indexer = get_trakt_indexer(show_obj.indexer) + for watchlisted_show in self.show_watchlist: + if watchlisted_show['show']['ids'].get(trakt_indexer) == show_obj.indexerid or \ + watchlisted_show['show']['ids'].get(get_trakt_indexer(EXTERNAL_IMDB), '') == show_obj.imdb_id: return True - except KeyError: - return False + return False else: - try: - if self.episode_watchlist[trakt_id][showid]['seasons'][season]['episodes'][episode] == episode: + trakt_indexer = get_trakt_indexer(indexer) + for watchlisted_episode in self.episode_watchlist: + if watchlisted_episode['episode'].get('season', -1) == season and \ + watchlisted_episode['episode'].get('number', -1) == episode and \ + watchlisted_episode['show']['ids'].get(trakt_indexer, '') == indexer_id: return True - except KeyError: - return False + return False def _get_show_watchlist(self): - """ - Get Watchlist and parse once into addressable structure - """ + """Get shows watchlist.""" try: - self.show_watchlist = {'tvdb_id': {}, 'tvrage_id': {}} - trakt_show_watchlist = self._request('sync/watchlist/shows') - - tvdb_id = 'tvdb' - tvrage_id = 'tvrage' - - for watchlist_item in trakt_show_watchlist: - tvdb = True if watchlist_item['show']['ids']['tvdb'] else False - tvrage = True if watchlist_item['show']['ids']['tvrage'] else False - title = watchlist_item['show']['title'] - year = watchlist_item['show']['year'] - slug = watchlist_item['show']['ids']['slug'] - - if tvdb: - showid = watchlist_item['show']['ids'][tvdb_id] - self.show_watchlist['{0}_id'.format(tvdb_id)][showid] = {'id': showid, 'title': title, 'year': year, 
'slug': slug} - - if tvrage: - showid = watchlist_item['show']['ids'][tvrage_id] - self.show_watchlist['{0}_id'.format(tvrage_id)][showid] = {'id': showid, 'title': title, 'year': year, 'slug': slug} - except TraktException as e: - logger.log(u"Could not connect to Trakt. Unable to retrieve show's watchlist: {0!r}".format(e), logger.WARNING) + self.show_watchlist = self._request('sync/watchlist/shows') + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log(u'Unable to retrieve shows from Trakt watchlist. Error: {error}'.format + (error=e.message), logger.INFO) return False return True def _get_episode_watchlist(self): - """ - Get Watchlist and parse once into addressable structure - """ + """Get episodes watchlist.""" try: - self.episode_watchlist = {'tvdb_id': {}, 'tvrage_id': {}} - trakt_episode_watchlist = self._request('sync/watchlist/episodes') - - tvdb_id = 'tvdb' - tvrage_id = 'tvrage' - - for watchlist_item in trakt_episode_watchlist: - tvdb = True if watchlist_item['show']['ids']['tvdb'] else False - tvrage = True if watchlist_item['show']['ids']['tvrage'] else False - title = watchlist_item['show']['title'] - year = watchlist_item['show']['year'] - season = watchlist_item['episode']['season'] - episode = watchlist_item['episode']['number'] - - if tvdb: - showid = watchlist_item['show']['ids'][tvdb_id] - - if showid not in self.episode_watchlist['{0}_id'.format(tvdb_id)].keys(): - self.episode_watchlist['{0}_id'.format(tvdb_id)][showid] = {'id': showid, 'title': title, 'year': year, 'seasons': {}} - - if season not in self.episode_watchlist['{0}_id'.format(tvdb_id)][showid]['seasons'].keys(): - self.episode_watchlist['{0}_id'.format(tvdb_id)][showid]['seasons'][season] = {'s': season, 'episodes': {}} - - if episode not in self.episode_watchlist['{0}_id'.format(tvdb_id)][showid]['seasons'][season]['episodes'].keys(): - self.episode_watchlist['{0}_id'.format(tvdb_id)][showid]['seasons'][season]['episodes'][episode] = episode - - if tvrage: - showid = watchlist_item['show']['ids'][tvrage_id] - - if showid not in self.episode_watchlist['{0}_id'.format(tvrage_id)].keys(): - self.episode_watchlist['{0}_id'.format(tvrage_id)][showid] = {'id': showid, 'title': title, 'year': year, 'seasons': {}} - - if season not in self.episode_watchlist['{0}_id'.format(tvrage_id)][showid]['seasons'].keys(): - self.episode_watchlist['{0}_id'.format(tvrage_id)][showid]['seasons'][season] = {'s': season, 'episodes': {}} - - if episode not in self.episode_watchlist['{0}_id'.format(tvrage_id)][showid]['seasons'][season]['episodes'].keys(): - self.episode_watchlist['{0}_id'.format(tvrage_id)][showid]['seasons'][season]['episodes'][episode] = episode - except TraktException as e: - logger.log(u"Could not connect to Trakt. Unable to retrieve episode's watchlist: {0!r}".format(e), logger.WARNING) + self.episode_watchlist = self._request('sync/watchlist/episodes') + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log(u'Unable to retrieve episodes from Trakt watchlist. 
Error: {error}'.format + (error=e.message), logger.INFO) return False return True - def _get_show_collection(self): # pylint: disable=too-many-branches - """ - Get Collection and parse once into addressable structure - """ + def _get_show_collection(self): + """Get show collection.""" try: - self.collection_list = {'tvdb_id': {}, 'tvrage_id': {}} - logger.log('Getting Show Collection', logger.DEBUG) - trakt_collection = self._request('sync/collection/shows') - - tvdb_id = 'tvdb' - tvrage_id = 'tvrage' - - for watchlist_item in trakt_collection: - tvdb = True if watchlist_item['show']['ids']['tvdb'] else False - tvrage = True if watchlist_item['show']['ids']['tvrage'] else False - title = watchlist_item['show']['title'] - year = watchlist_item['show']['year'] - - if 'seasons' in watchlist_item: - for season_item in watchlist_item['seasons']: - for episode_item in season_item['episodes']: - season = season_item['number'] - episode = episode_item['number'] - - if tvdb: - showid = watchlist_item['show']['ids'][tvdb_id] - - if showid not in self.collection_list['{0}_id'.format(tvdb_id)].keys(): - self.collection_list['{0}_id'.format(tvdb_id)][showid] = {'id': showid, 'title': title, 'year': year, 'seasons': {}} - - if season not in self.collection_list['{0}_id'.format(tvdb_id)][showid]['seasons'].keys(): - self.collection_list['{0}_id'.format(tvdb_id)][showid]['seasons'][season] = {'s': season, 'episodes': {}} - - if episode not in self.collection_list['{0}_id'.format(tvdb_id)][showid]['seasons'][season]['episodes'].keys(): - self.collection_list['{0}_id'.format(tvdb_id)][showid]['seasons'][season]['episodes'][episode] = episode - - if tvrage: - showid = watchlist_item['show']['ids'][tvrage_id] - - if showid not in self.collection_list[tvrage_id + '_id'].keys(): - self.collection_list[tvrage_id + '_id'][showid] = {'id': showid, 'title': title, 'year': year, 'seasons': {}} - - if season not in self.collection_list[tvrage_id + '_id'][showid]['seasons'].keys(): - self.collection_list[tvrage_id + '_id'][showid]['seasons'][season] = {'s': season, 'episodes': {}} - - if episode not in self.collection_list[tvrage_id + '_id'][showid]['seasons'][season]['episodes'].keys(): - self.collection_list[tvrage_id + '_id'][showid]['seasons'][season]['episodes'][episode] = episode - except TraktException as e: - logger.log(u"Could not connect to Trakt. Unable to retrieve show's collection: {0!r}".format(e), logger.WARNING) + self.collection_list = self._request('sync/collection/shows') + except (TraktException, AuthException, TokenExpiredException) as e: + logger.log(u"Unable to retrieve shows from Trakt collection. 
Error: {error}".format + (error=e.message), logger.INFO) return False return True @staticmethod - def trakt_bulk_data_generate(data): # pylint: disable=too-many-locals - """ - Build the JSON structure to send back to Trakt - """ - uniqueShows = {} - uniqueSeasons = {} - - for showid, indexerid, show_name, start_year, season, episode in data: - if showid not in uniqueShows: - uniqueShows[showid] = {'title': show_name, 'year': start_year, 'ids': {}, 'seasons': []} - trakt_id = indexerApi(indexerid).config['trakt_id'] + def trakt_bulk_data_generate(trakt_data): + """Build the JSON structure to send back to Trakt.""" + unique_shows = {} + unique_seasons = {} - if trakt_id == 'tvdb_id': - uniqueShows[showid]['ids']['tvdb'] = showid - else: - uniqueShows[showid]['ids']['tvrage'] = showid - uniqueSeasons[showid] = [] + for indexer_id, indexer, show_name, start_year, season, episode in trakt_data: + if indexer_id not in unique_shows: + unique_shows[indexer_id] = {'title': show_name, 'year': start_year, 'ids': {}, 'seasons': []} + unique_shows[indexer_id]['ids'][get_trakt_indexer(indexer)] = indexer_id + unique_seasons[indexer_id] = [] # Get the unique seasons per Show - for showid, indexerid, show_name, start_year, season, episode in data: - if season not in uniqueSeasons[showid]: - uniqueSeasons[showid].append(season) + for indexer_id, indexer, show_name, start_year, season, episode in trakt_data: + if season not in unique_seasons[indexer_id]: + unique_seasons[indexer_id].append(season) # build the query - showList = [] - seasonsList = {} - - for searchedShow in uniqueShows: - seasonsList[searchedShow] = [] - - for searchedSeason in uniqueSeasons[searchedShow]: - episodesList = [] - - for showid, indexerid, show_name, start_year, season, episode in data: - if season == searchedSeason and showid == searchedShow: - episodesList.append({'number': episode}) - show = uniqueShows[searchedShow] - show['seasons'].append({'number': searchedSeason, 'episodes': episodesList}) - showList.append(show) - post_data = {'shows': showList} + show_list = [] + seasons_list = {} + + for searched_show in unique_shows: + show = [] + seasons_list[searched_show] = [] + + for searched_season in unique_seasons[searched_show]: + episodes_list = [] + + for indexer_id, indexer, show_name, start_year, season, episode in trakt_data: + if season == searched_season and indexer_id == searched_show: + episodes_list.append({'number': episode}) + show = unique_shows[searched_show] + show['seasons'].append({'number': searched_season, 'episodes': episodes_list}) + if show: + show_list.append(show) + post_data = {'shows': show_list} return post_data diff --git a/medusa/tv/__init__.py b/medusa/tv/__init__.py index 965c266831..7c35c4bc96 100644 --- a/medusa/tv/__init__.py +++ b/medusa/tv/__init__.py @@ -3,4 +3,5 @@ from medusa.tv.base import TV from medusa.tv.cache import Cache from medusa.tv.episode import Episode +from medusa.tv.indexer import Indexer from medusa.tv.series import Series diff --git a/medusa/tv/base.py b/medusa/tv/base.py index 79bbd37fbd..4e8f229dd2 100644 --- a/medusa/tv/base.py +++ b/medusa/tv/base.py @@ -1,16 +1,23 @@ -"""TVShow and TVEpisode classes.""" +# coding=utf-8 +"""TV base class.""" -import datetime -import shutil import threading from medusa.indexers.indexer_config import INDEXER_TVDBV2 -import shutil_custom -shutil.copyfile = shutil_custom.copyfile_custom +class Identifier(object): + """Base identifier class.""" -MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() + def 
__nonzero__(self): + """Magic method.""" + raise NotImplementedError + + __bool__ = __nonzero__ + + def __ne__(self, other): + """Magic method.""" + return not self == other class TV(object): diff --git a/medusa/tv/cache.py b/medusa/tv/cache.py index c9b5956e08..ab023088fa 100644 --- a/medusa/tv/cache.py +++ b/medusa/tv/cache.py @@ -1,35 +1,22 @@ # coding=utf-8 -# Author: Nic Wolfe -# -# This file is part of Medusa. -# -# Medusa is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Medusa is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Medusa. If not, see . + """tv_cache code.""" + from __future__ import unicode_literals import itertools +import logging import traceback from time import time from medusa import ( app, db, - logger, show_name_helpers, ) from medusa.helper.common import episode_num from medusa.helper.exceptions import AuthException +from medusa.logger.adapters.style import BraceAdapter from medusa.name_parser.parser import ( InvalidNameException, InvalidShowException, @@ -40,6 +27,9 @@ from six import text_type +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class CacheDBConnection(db.DBConnection): """Cache database class.""" @@ -51,12 +41,7 @@ def __init__(self, provider_id): # Create the table if it's not already there try: if not self.hasTable(provider_id): - logger.log( - 'Creating cache table for provider {0}'.format( - provider_id - ), - logger.DEBUG - ) + log.debug('Creating cache table for provider {0}', provider_id) self.action( b'CREATE TABLE [{name}]' b' (name TEXT,' @@ -85,10 +70,7 @@ def __init__(self, provider_id): self.action(b'DROP INDEX IF EXISTS idx_url') # add unique index if one does not exist to prevent further dupes - logger.log( - 'Creating UNIQUE URL index for {0}'.format(provider_id), - logger.DEBUG - ) + log.debug('Creating UNIQUE URL index for {0}', provider_id) self.action( b'CREATE UNIQUE INDEX ' b'IF NOT EXISTS idx_url_{name} ' @@ -124,9 +106,9 @@ def __init__(self, provider_id): b' time NUMERIC)' ) except Exception as error: - logger.log('Error while searching {provider_id}, skipping: {e!r}'. 
- format(provider_id=provider_id, e=error), logger.DEBUG) - logger.log(traceback.format_exc(), logger.DEBUG) + log.debug('Error while searching {provider_id}, skipping: {error!r}', + {'provider_id': provider_id, 'error': error}) + log.debug(traceback.format_exc()) msg = 'table [{name}] already exists'.format(name='last_update') if str(error) != msg: raise @@ -166,12 +148,8 @@ def trim(self, days=None): if days: now = int(time()) # current timestamp retention_period = now - (days * 86400) - logger.log( - 'Removing cache entries older than {x} days' - ' from {provider}'.format( - x=days, provider=self.provider_id - ) - ) + log.info('Removing cache entries older than {x} days from {provider}', + {'x': days, 'provider': self.provider_id}) cache_db_con = self._get_db() cache_db_con.action( b'DELETE FROM [{provider}] ' @@ -237,24 +215,17 @@ def update_cache(self): found_recent_results += 1 if found_recent_results >= self.provider.stop_at: - logger.log( - 'Hit old cached items, not parsing any more' - ' for: {0}'.format(self.provider_id), - logger.DEBUG - ) + log.debug('Hit old cached items, not parsing any more for: {0}', + self.provider_id) break try: result = self._parse_item(item) if result is not None: results.append(result) - except UnicodeDecodeError as e: - logger.log( - 'Unicode decoding error, missed parsing item' - ' from provider {0}: {1!r}'.format( - self.provider.name, e - ), - logger.WARNING - ) + except UnicodeDecodeError as error: + log.warning('Unicode decoding error, missed parsing item' + ' from provider {0}: {1!r}', + self.provider.name, error) cache_db_con = self._get_db() if results: @@ -265,8 +236,8 @@ def update_cache(self): limit = min(index, self.provider.max_recent_items) self.provider.recent_results = data['entries'][0:limit] - except AuthException as e: - logger.log('Authentication error: {0!r}'.format(e), logger.ERROR) + except AuthException as error: + log.error('Authentication error: {0!r}', error) def update_cache_manual_search(self, manual_data=None): """Update cache using manual search results.""" @@ -276,33 +247,23 @@ def update_cache_manual_search(self, manual_data=None): results = [] try: for item in manual_data: - logger.log( - 'Adding to cache item found in' - ' manual search: {0}'.format(item.name), - logger.DEBUG - ) + log.debug('Adding to cache item found in manual search: {0}', + item.name) result = self.add_cache_entry( item.name, item.url, item.seeders, item.leechers, item.size, item.pubdate ) if result is not None: results.append(result) - except Exception as e: - logger.log( - 'Error while adding to cache item found in manual search' - ' for provider {0}, skipping: {1!r}'.format( - self.provider.name, e - ), - logger.WARNING - ) + except Exception as error: + log.warning('Error while adding to cache item found in manual search' + ' for provider {0}, skipping: {1!r}', + self.provider.name, error) cache_db_con = self._get_db() if results: - logger.log( - 'Mass updating cache table with manual results' - ' for provider: {0}'.format(self.provider.name), - logger.DEBUG - ) + log.debug('Mass updating cache table with manual results' + ' for provider: {0}', self.provider.name) return bool(cache_db_con.mass_action(results)) def get_rss_feed(self, url, params=None): @@ -339,12 +300,8 @@ def _parse_item(self, item): leechers, size, pubdate) else: - logger.log( - 'The data returned from the {0} feed is incomplete,' - ' this result is unusable'.format(self.provider.name), - logger.DEBUG - ) - + log.debug('The data returned from the {0} feed is incomplete,' + ' 
this result is unusable', self.provider.name) return False @property @@ -397,16 +354,11 @@ def should_update(self): """Check if we should update provider cache.""" # if we've updated recently then skip the update if time() - self.updated < self.minTime * 60: - logger.log( - 'Last update was too soon, using old cache: {0}.' - ' Updated less then {1} minutes ago'.format( - self.updated, - self.minTime, - ), - logger.DEBUG - ) + log.debug('Last update was too soon, using old cache: {0}.' + ' Updated less then {1} minutes ago', + self.updated, self.minTime) return False - logger.log("Updating providers cache", logger.DEBUG) + log.debug('Updating providers cache') return True @@ -416,7 +368,7 @@ def add_cache_entry(self, name, url, seeders, leechers, size, pubdate, parsed_re # Use the already passed parsed_result of possible. parse_result = parsed_result or NameParser().parse(name) except (InvalidNameException, InvalidShowException) as error: - logger.log('{0}'.format(error), logger.DEBUG) + log.debug('{0}', error) return None if not parse_result or not parse_result.series_name: @@ -452,13 +404,7 @@ def add_cache_entry(self, name, url, seeders, leechers, size, pubdate, parsed_re # Store proper_tags as proper1|proper2|proper3 proper_tags = '|'.join(parse_result.proper_tags) - logger.log( - 'Added RSS item: [{0}] to cache: [{1}]'.format( - name, self.provider_id - ), - logger.DEBUG - ) - + log.debug('Added RSS item: {0} to cache: {1}', name, self.provider_id) return [ b'INSERT OR REPLACE INTO [{name}] ' b' (name, season, episodes, indexerid, url, ' @@ -520,12 +466,14 @@ def find_needed_episodes(self, episode, forced_search=False, sql_results = list(itertools.chain(*sql_results)) else: sql_results = [] - logger.log("{id}: No cached results in {provider} for show '{show_name}' episode '{ep}'".format - (id=ep_obj.show.indexerid, - provider=self.provider.name, - show_name=ep_obj.show.name, - ep=episode_num(ep_obj.season, ep_obj.episode)), - logger.DEBUG) + log.debug( + '{id}: No cached results in {provider} for series {show_name!r} episode {ep}', { + 'id': ep_obj.show.indexerid, + 'provider': self.provider.name, + 'show_name': ep_obj.show.name, + 'ep': episode_num(ep_obj.season, ep_obj.episode), + } + ) # for each cache entry for cur_result in sql_results: @@ -540,10 +488,7 @@ def find_needed_episodes(self, episode, forced_search=False, # skip if provider is anime only and show is not anime if self.provider.anime_only and not show_obj.is_anime: - logger.log( - '{0} is not an anime, skiping'.format(show_obj.name), - logger.DEBUG - ) + log.debug('{0} is not an anime, skipping', show_obj.name) continue # get season and ep data (ignoring multi-eps for now) @@ -564,10 +509,7 @@ def find_needed_episodes(self, episode, forced_search=False, # if the show says we want that episode then add it to the list if not show_obj.want_episode(cur_season, cur_ep, cur_quality, forced_search, down_cur_quality): - logger.log( - 'Ignoring {0}'.format(cur_result[b'name']), - logger.DEBUG - ) + log.debug('Ignoring {0}', cur_result[b'name']) continue ep_obj = show_obj.get_episode(cur_season, cur_ep) @@ -576,12 +518,14 @@ def find_needed_episodes(self, episode, forced_search=False, title = cur_result[b'name'] url = cur_result[b'url'] - logger.log("{id}: Using cached results from {provider} for show '{show_name}' episode '{ep}'".format - (id=ep_obj.show.indexerid, - provider=self.provider.name, - show_name=ep_obj.show.name, - ep=episode_num(ep_obj.season, ep_obj.episode)), - logger.DEBUG) + log.debug( + '{id}: Using cached 
results from {provider} for series {show_name!r} episode {ep}', { + 'id': ep_obj.show.indexerid, + 'provider': self.provider.name, + 'show_name': ep_obj.show.name, + 'ep': episode_num(ep_obj.season, ep_obj.episode), + } + ) result = self.provider.get_result([ep_obj]) result.show = show_obj diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 830635cdf1..7dc67b4115 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -15,24 +15,22 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . -"""Series and Episode classes.""" +"""Episode classes.""" -import datetime +from __future__ import unicode_literals + +import logging import os.path import re -import shutil import time -from collections import ( - OrderedDict, -) +from datetime import date, datetime import knowit from medusa import ( app, db, helpers, - logger, network_timezones, notifiers, post_processor, @@ -51,6 +49,7 @@ WANTED, statusStrings, ) +from medusa.helper.collections import NonEmptyDict from medusa.helper.common import ( dateFormat, dateTimeFormat, @@ -85,18 +84,141 @@ get_scene_numbering, xem_refresh, ) -from medusa.tv.base import TV - -import shutil_custom +from medusa.tv.base import Identifier, TV try: import xml.etree.cElementTree as ETree except ImportError: import xml.etree.ElementTree as ETree -shutil.copyfile = shutil_custom.copyfile_custom +logger = logging.getLogger(__name__) + + +class EpisodeNumber(Identifier): + """Episode Number: season/episode, absolute or air by date.""" + + date_fmt = '%Y-%m-%d' + regex = re.compile(r'\b(?:(?P<air_date>\d{4}-\d{2}-\d{2})|' + r'(?:s(?P<season>\d{1,4}))(?:e(?P<episode>\d{1,2}))|' + r'(?:e(?P<abs_episode>\d{1,3})))\b', re.IGNORECASE) + + @classmethod + def from_slug(cls, slug): + """Create episode number from slug. E.g.: s01e02.""" + match = cls.regex.match(slug) + if match: + try: + result = {k: int(v) if k != 'air_date' else datetime.strptime(v, cls.date_fmt) + for k, v in match.groupdict().items() if v is not None} + if result: + if 'air_date' in result: + return AirByDateNumber(**result) + if 'season' in result and 'episode' in result: + return RelativeNumber(**result) + if 'abs_episode' in result: + return AbsoluteNumber(**result) + except ValueError: + pass + + +class RelativeNumber(Identifier): + """Regular episode number: season and episode.""" + + def __init__(self, season, episode): + """Constructor. + + :param season: + :type season: int + :param episode: + :type episode: int + """ + self.season = season + self.episode = episode + + def __nonzero__(self): + """Magic method.""" + return self.season is not None and self.episode is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.season, self.episode) + + def __str__(self): + """Magic method.""" + return 's{0:02d}e{1:02d}'.format(self.season, self.episode) + + def __hash__(self): + """Magic method.""" + return hash((self.season, self.episode)) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, RelativeNumber) and ( + self.season == other.season and self.episode == other.episode) + + +class AbsoluteNumber(EpisodeNumber): + """Episode number class that handles absolute episode numbers.""" + + def __init__(self, abs_episode): + """Constructor.
+ + :param abs_episode: + :type abs_episode: int + """ + self.episode = abs_episode + + def __nonzero__(self): + """Magic method.""" + return self.episode is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.episode) + + def __str__(self): + """Magic method.""" + return 'e{0:02d}'.format(self.episode) + + def __hash__(self): + """Magic method.""" + return hash(self.episode) -MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() + def __eq__(self, other): + """Magic method.""" + return isinstance(other, AbsoluteNumber) and self.episode == other.episode + + +class AirByDateNumber(EpisodeNumber): + """Episode number class that handles air-by-date episode numbers.""" + + def __init__(self, air_date): + """Constructor. + + :param air_date: + :type air_date: datetime + """ + self.air_date = air_date + + def __nonzero__(self): + """Magic method.""" + return self.air_date is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.air_date) + + def __str__(self): + """Magic method.""" + return self.air_date.strftime(self.date_fmt) + + def __hash__(self): + """Magic method.""" + return hash(self.air_date) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, AirByDateNumber) and self.air_date == other.air_date class Episode(TV): @@ -112,7 +234,9 @@ def __init__(self, show, season, episode, filepath=''): 'scene_episode', 'scene_absolute_number', 'related_episodes', - 'wanted_quality'} + 'wanted_quality', + 'loaded' + } ) self.show = show self.name = '' @@ -122,8 +246,8 @@ def __init__(self, show, season, episode, filepath=''): self.description = '' self.subtitles = list() self.subtitles_searchcount = 0 - self.subtitles_lastsearch = str(datetime.datetime.min) - self.airdate = datetime.date.fromordinal(1) + self.subtitles_lastsearch = str(datetime.min) + self.airdate = date.fromordinal(1) self.hasnfo = False self.hastbn = False self.status = UNKNOWN @@ -144,6 +268,32 @@ def __init__(self, show, season, episode, filepath=''): self._specify_episode(self.season, self.episode) self.check_for_meta_files() + @classmethod + def find_by_series_and_episode(cls, series, episode_number): + """Find Episode based on series and episode number. + + :param series: + :type series: medusa.tv.series.Series + :param episode_number: + :type episode_number: EpisodeNumber + :return: + :rtype: medusa.tv.Episode + """ + if isinstance(episode_number, RelativeNumber): + episode = series.get_episode(season=episode_number.season, episode=episode_number.episode) + elif isinstance(episode_number, AbsoluteNumber): + episode = series.get_episode(absolute_number=episode_number.episode) + + elif isinstance(episode_number, AirByDateNumber): + episode = series.get_episode(air_date=episode_number.air_date) + else: + # if this happens then it's a bug! + raise ValueError + + if episode: + if episode.loaded or episode.load_from_db(episode.season, episode.episode): + return episode + @staticmethod def from_filepath(filepath): """Return an Episode for the given filepath. 
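Note on the new EpisodeNumber.from_slug helper above: it matches a slug against named groups and dispatches to an air-by-date, season/episode, or absolute number. A minimal standalone sketch of the same parsing idea, runnable on its own; the SLUG pattern, parse_slug helper and sample slugs below are illustrative assumptions mirroring the diff, not Medusa code:

import re
from datetime import datetime

# Same named groups the diff relies on: air_date, season, episode, abs_episode.
SLUG = re.compile(r'\b(?:(?P<air_date>\d{4}-\d{2}-\d{2})|'
                  r'(?:s(?P<season>\d{1,4}))(?:e(?P<episode>\d{1,2}))|'
                  r'(?:e(?P<abs_episode>\d{1,3})))\b', re.IGNORECASE)


def parse_slug(slug):
    """Return a dict describing the episode number encoded in the slug, or None."""
    match = SLUG.match(slug)
    if not match:
        return None
    groups = {k: v for k, v in match.groupdict().items() if v is not None}
    if 'air_date' in groups:
        return {'air_date': datetime.strptime(groups['air_date'], '%Y-%m-%d')}
    if 'season' in groups and 'episode' in groups:
        return {'season': int(groups['season']), 'episode': int(groups['episode'])}
    if 'abs_episode' in groups:
        return {'absolute': int(groups['abs_episode'])}


print(parse_slug('s01e02'))      # {'season': 1, 'episode': 2}
print(parse_slug('e013'))        # {'absolute': 13}
print(parse_slug('2017-05-01'))  # {'air_date': datetime(2017, 5, 1, 0, 0)}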
@@ -175,7 +325,7 @@ def from_filepath(filepath): return episode # only root episode has related_episodes except (InvalidNameException, InvalidShowException): - logger.log(u'Cannot create Episode from path {path}'.format(path=filepath), logger.WARNING) + logger.warning('Cannot create Episode from path {path}', path=filepath) @property def identifier(self): @@ -202,11 +352,32 @@ def location(self): @location.setter def location(self, value): - logger.log(u'{id}: Setter sets location to {location}'.format - (id=self.show.indexerid, location=value), logger.DEBUG) + logger.debug('{id}: Setter sets location to {location}', + id=self.show.indexerid, location=value) self._location = value self.file_size = os.path.getsize(value) if value and self.is_location_valid(value) else 0 + @property + def indexer_name(self): + """Return the indexer name identifier. Example: tvdb.""" + return indexerConfig[self.indexer].get('identifier') + + @property + def air_date(self): + """Return air date from the episode.""" + return sbdatetime.convert_to_setting( + network_timezones.parse_date_time( + date.toordinal(self.airdate), + self.show.airs, + self.show.network + ) + ).isoformat(b'T') + + @property + def status_name(self): + """Return the status name.""" + return statusStrings[Quality.split_composite_status(self.status).status] + def is_location_valid(self, location=None): """Whether the location is a valid file. @@ -227,13 +398,13 @@ def refresh_subtitles(self): ep_num = (episode_num(self.season, self.episode) or episode_num(self.season, self.episode, numbering='absolute')) if self.subtitles == current_subtitles: - logger.log(u'{id}: No changed subtitles for {show} {ep}. Current subtitles: {subs}'.format - (id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles), logger.DEBUG) + logger.debug('{id}: No changed subtitles for {show} {ep}. Current subtitles: {subs}', + id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles) else: - logger.log(u'{id}: Subtitle changes detected for this show {show} {ep}. Current subtitles: {subs}'.format - (id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles), logger.DEBUG) + logger.debug('{id}: Subtitle changes detected for this show {show} {ep}. 
Current subtitles: {subs}', + id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles) self.subtitles = current_subtitles if current_subtitles else [] - logger.log(u'{id}: Saving subtitles changes to database'.format(id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Saving subtitles changes to database', id=self.show.indexerid) self.save_to_db() def download_subtitles(self, lang=None): @@ -243,11 +414,10 @@ def download_subtitles(self, lang=None): :type lang: string """ if not self.is_location_valid(): - logger.log(u"{id}: {show} {ep} file doesn't exist, can't download subtitles".format - (id=self.show.indexerid, show=self.show.name, - ep=(episode_num(self.season, self.episode) or episode_num(self.season, self.episode, - numbering='absolute'))), - logger.DEBUG) + logger.debug("{id}: {show} {ep} file doesn't exist, can't download subtitles", + id=self.show.indexerid, show=self.show.name, + ep=(episode_num(self.season, self.episode) or episode_num(self.season, self.episode, + numbering='absolute'))) return new_subtitles = subtitles.download_subtitles(self, lang=lang) @@ -255,23 +425,23 @@ def download_subtitles(self, lang=None): self.subtitles = subtitles.merge_subtitles(self.subtitles, new_subtitles) self.subtitles_searchcount += 1 if self.subtitles_searchcount else 1 - self.subtitles_lastsearch = datetime.datetime.now().strftime(dateTimeFormat) - logger.log(u'{id}: Saving last subtitles search to database'.format(id=self.show.indexerid), logger.DEBUG) + self.subtitles_lastsearch = datetime.now().strftime(dateTimeFormat) + logger.debug('{id}: Saving last subtitles search to database', id=self.show.indexerid) self.save_to_db() if new_subtitles: subtitle_list = ', '.join([subtitles.name_from_code(code) for code in new_subtitles]) - logger.log(u'{id}: Downloaded {subs} subtitles for {show} {ep}'.format - (id=self.show.indexerid, subs=subtitle_list, show=self.show.name, + logger.info('{id}: Downloaded {subs} subtitles for {show} {ep}', + id=self.show.indexerid, subs=subtitle_list, show=self.show.name, ep=(episode_num(self.season, self.episode) or - episode_num(self.season, self.episode, numbering='absolute')))) + episode_num(self.season, self.episode, numbering='absolute'))) notifiers.notify_subtitle_download(self.pretty_name(), subtitle_list) else: - logger.log(u'{id}: No subtitles found for {show} {ep}'.format - (id=self.show.indexerid, show=self.show.name, + logger.info('{id}: No subtitles found for {show} {ep}', + id=self.show.indexerid, show=self.show.name, ep=(episode_num(self.season, self.episode) or - episode_num(self.season, self.episode, numbering='absolute')))) + episode_num(self.season, self.episode, numbering='absolute'))) return new_subtitles @@ -319,9 +489,8 @@ def _specify_episode(self, season, episode): try: self.__load_from_nfo(self.location) except NoNFOException: - logger.log(u'{id}: There was an error loading the NFO for episode {show} {ep}'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode)), - logger.ERROR) + logger.error('{id}: There was an error loading the NFO for episode {show} {ep}', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode)) # if we tried loading it from NFO and didn't find the NFO, try the Indexers if not self.hasnfo: @@ -332,7 +501,7 @@ def _specify_episode(self, season, episode): # if we failed SQL *and* NFO, Indexers then fail if not result: - raise EpisodeNotFoundException(u"{id}: Couldn't find episode {show} {ep}".format + raise 
EpisodeNotFoundException("{id}: Couldn't find episode {show} {ep}".format (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode))) @@ -362,9 +531,8 @@ def load_from_db(self, season, episode): if len(sql_results) > 1: raise MultipleEpisodesInDatabaseException('Your DB has two records for the same show somehow.') elif not sql_results: - logger.log(u'{id}: {show} {ep} not found in the database'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode)), - logger.DEBUG) + logger.debug('{id}: {show} {ep} not found in the database', + id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode)) return False else: if sql_results[0][b'name']: @@ -380,7 +548,7 @@ def load_from_db(self, season, episode): self.subtitles = sql_results[0][b'subtitles'].split(',') self.subtitles_searchcount = sql_results[0][b'subtitles_searchcount'] self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch'] - self.airdate = datetime.date.fromordinal(int(sql_results[0][b'airdate'])) + self.airdate = date.fromordinal(int(sql_results[0][b'airdate'])) self.status = int(sql_results[0][b'status'] or -1) # don't overwrite my location @@ -455,42 +623,41 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season my_ep = show[season][episode] except (IndexerError, IOError) as e: - logger.log(u'{id}: {indexer} threw up an error: {error_msg}'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name, error_msg=ex(e)), - logger.WARNING) + logger.warning('{id}: {indexer} threw up an error: {error_msg}', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name, error_msg=ex(e)) + # if the episode is already valid just log it, if not throw it up if self.name: - logger.log( - u'{id}: {indexer} timed out but we have enough info from other sources, allowing the error'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.DEBUG) + logger.debug( + '{id}: {indexer} timed out but we have enough info from other sources, allowing the error', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) return else: - logger.log(u'{id}: {indexer} timed out, unable to create the episode'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.WARNING) + logger.warning('{id}: {indexer} timed out, unable to create the episode', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) return False except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'{id}: Unable to find the episode on {indexer}. Deleting it from db'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.DEBUG) + logger.debug('{id}: Unable to find the episode on {indexer}. Deleting it from db', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) # if I'm no longer on the Indexers but I once was then delete myself from the DB if self.indexerid != -1: self.delete_episode() return if getattr(my_ep, 'episodename', None) is None: - logger.log(u'{id}: {show} {ep} has no name on {indexer}. Setting to an empty string'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - indexer=indexerApi(self.indexer).name)) + logger.info('{id}: {show} {ep} has no name on {indexer}. 
Setting to an empty string', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + indexer=indexerApi(self.indexer).name) setattr(my_ep, 'episodename', '') if getattr(my_ep, 'absolute_number', None) is None: - logger.log(u'{id}: {show} {ep} has no absolute number on {indexer}'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - indexer=indexerApi(self.indexer).name), logger.DEBUG) + logger.debug('{id}: {show} {ep} has no absolute number on {indexer}', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + indexer=indexerApi(self.indexer).name) else: - logger.log(u'{id}: {show} {ep} has absolute number: {absolute} '.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - absolute=my_ep['absolute_number']), - logger.DEBUG) + logger.debug('{id}: {show} {ep} has absolute number: {absolute} ', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + absolute=my_ep['absolute_number']) self.absolute_number = int(my_ep['absolute_number']) self.name = getattr(my_ep, 'episodename', '') @@ -515,15 +682,15 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season firstaired = getattr(my_ep, 'firstaired', None) if not firstaired or firstaired == '0000-00-00': - firstaired = str(datetime.date.fromordinal(1)) + firstaired = str(date.fromordinal(1)) raw_airdate = [int(x) for x in firstaired.split('-')] try: - self.airdate = datetime.date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) + self.airdate = date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) except (ValueError, IndexError): - logger.log(u'{id}: Malformed air date of {aired} retrieved from {indexer} for {show} {ep}'.format - (id=self.show.indexerid, aired=firstaired, indexer=indexerApi(self.indexer).name, - show=self.show.name, ep=episode_num(season, episode)), logger.WARNING) + logger.warning('{id}: Malformed air date of {aired} retrieved from {indexer} for {show} {ep}', + id=self.show.indexerid, aired=firstaired, indexer=indexerApi(self.indexer).name, + show=self.show.name, ep=episode_num(season, episode)) # if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now if self.indexerid != -1: self.delete_episode() @@ -532,8 +699,8 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season # early conversion to int so that episode doesn't get marked dirty self.indexerid = getattr(my_ep, 'id', None) if self.indexerid is None: - logger.log(u'{id}: Failed to retrieve ID from {indexer}'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.ERROR) + logger.error('{id}: Failed to retrieve ID from {indexer}', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) if self.indexerid != -1: self.delete_episode() return False @@ -542,40 +709,39 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season if all([not self.show.is_location_valid(), not app.CREATE_MISSING_SHOW_DIRS, not app.ADD_SHOWS_WO_DIR]): - logger.log(u"{id}: Show {show} location '{location}' is missing. Keeping current episode statuses" - .format(id=self.show.indexerid, show=self.show.name, location=self.show.raw_location), - logger.WARNING) + logger.warning("{id}: Show {show} location '{location}' is missing. 
Keeping current episode statuses", + id=self.show.indexerid, show=self.show.name, location=self.show.raw_location) return if self.location: - logger.log(u"{id}: {show} {ep} has status '{status}' and location {location}".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - status=statusStrings[self.status].upper(), location=self.location), logger.DEBUG) + logger.debug("{id}: {show} {ep} has status '{status}' and location {location}", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + status=statusStrings[self.status].upper(), location=self.location) if not os.path.isfile(self.location): - if (self.airdate >= datetime.date.today() or self.airdate == datetime.date.fromordinal(1)) and \ + if (self.airdate >= date.today() or self.airdate == date.fromordinal(1)) and \ self.status in (UNAIRED, UNKNOWN, WANTED): # Need to check if is UNAIRED otherwise code will step into second 'IF' # and make episode as default_ep_status # If is a leaked episode and user manually snatched, it will respect status # If is a fake (manually snatched), when user set as FAILED, status will be WANTED # and code below will make it UNAIRED again - logger.log(u"{id}: {show} {ep} airs in the future or has no airdate, marking it '{status}'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - status=statusStrings[UNAIRED].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} airs in the future or has no airdate, marking it '{status}'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + status=statusStrings[UNAIRED].upper()) self.status = UNAIRED elif self.status in (UNAIRED, UNKNOWN): # Only do UNAIRED/UNKNOWN, it could already be snatched/ignored/skipped, # or downloaded/archived to disconnected media new_status = self.show.default_ep_status if self.season > 0 else SKIPPED # auto-skip specials - logger.log(u"{id}: {show} {ep} has already aired, marking it '{status}'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - status=statusStrings[new_status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} has already aired, marking it '{status}'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + status=statusStrings[new_status].upper()) self.status = new_status else: - logger.log(u"{id}: {show} {ep} status untouched: '{status}'".format - (id=self.show.indexerid, show=self.show.name, - ep=episode_num(season, episode), status=statusStrings[self.status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} status untouched: '{status}'", + id=self.show.indexerid, show=self.show.name, + ep=episode_num(season, episode), status=statusStrings[self.status].upper()) # We only change the episode's status if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): @@ -583,33 +749,33 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season Quality.ARCHIVED + Quality.SNATCHED_BEST: old_status = self.status self.status = Quality.status_from_name(self.location, anime=self.show.is_anime) - logger.log(u"{id}: {show} {ep} status changed from '{old_status}' to '{new_status}' " - u"as current status is not SNATCHED|DOWNLOADED|ARCHIVED".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - old_status=statusStrings[old_status].upper(), - new_status=statusStrings[self.status].upper()), logger.DEBUG) + logger.debug("{id}: 
{show} {ep} status changed from '{old_status}' to '{new_status}' " + "as current status is not SNATCHED|DOWNLOADED|ARCHIVED", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + old_status=statusStrings[old_status].upper(), + new_status=statusStrings[self.status].upper()) else: - logger.log(u"{id}: {show} {ep} status untouched: '{status}'".format - (id=self.show.indexerid, show=self.show.name, - ep=episode_num(season, episode), status=statusStrings[self.status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} status untouched: '{status}'", + id=self.show.indexerid, show=self.show.name, + ep=episode_num(season, episode), status=statusStrings[self.status].upper()) # shouldn't get here probably else: - logger.log(u"{id}: {show} {ep} status changed from '{old_status}' to 'UNKNOWN'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - old_status=statusStrings[self.status].upper()), logger.WARNING) + logger.warning("{id}: {show} {ep} status changed from '{old_status}' to 'UNKNOWN'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + old_status=statusStrings[self.status].upper()) self.status = UNKNOWN def __load_from_nfo(self, location): if not self.show.is_location_valid(): - logger.log(u'{id}: The show location {location} is missing, unable to load metadata'.format - (id=self.show.indexerid, location=location), logger.WARNING) + logger.warning('{id}: The show location {location} is missing, unable to load metadata', + id=self.show.indexerid, location=location) return - logger.log(u'{id}: Loading episode details from the NFO file associated with {location}'.format - (id=self.show.indexerid, location=location), logger.DEBUG) + logger.debug('{id}: Loading episode details from the NFO file associated with {location}', + id=self.show.indexerid, location=location) self.location = location @@ -617,36 +783,36 @@ def __load_from_nfo(self, location): if self.status == UNKNOWN and helpers.is_media_file(self.location): self.status = Quality.status_from_name(self.location, anime=self.show.is_anime) - logger.log(u"{id}: {show} {ep} status changed from 'UNKNOWN' to '{new_status}'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode), - new_status=self.status), logger.DEBUG) + logger.debug("{id}: {show} {ep} status changed from 'UNKNOWN' to '{new_status}'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode), + new_status=self.status) nfo_file = replace_extension(self.location, 'nfo') - logger.log(u'{id}: Using NFO name {nfo}'.format(id=self.show.indexerid, nfo=nfo_file), logger.DEBUG) + logger.debug('{id}: Using NFO name {nfo}', id=self.show.indexerid, nfo=nfo_file) if os.path.isfile(nfo_file): try: show_xml = ETree.ElementTree(file=nfo_file) except (SyntaxError, ValueError) as e: - logger.log(u'{id}: Error loading the NFO, backing up the NFO and skipping for now: '.format - (id=self.show.indexerid, error_msg=ex(e)), logger.ERROR) + logger.error('{id}: Error loading the NFO, backing up the NFO and skipping for now: {error_msg}', + id=self.show.indexerid, error_msg=ex(e)) try: os.rename(nfo_file, nfo_file + '.old') except Exception as e: - logger.log(u"{id}: Failed to rename your episode's NFO file. " - u'You need to delete it or fix it: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + logger.warning("{id}: Failed to rename your episode's NFO file. 
" + 'You need to delete it or fix it: {error_msg}', + id=self.show.indexerid, error_msg=ex(e)) raise NoNFOException('Error in NFO format') for ep_details in list(show_xml.iter('episodedetails')): if (ep_details.findtext('season') is None or int(ep_details.findtext('season')) != self.season or ep_details.findtext('episode') is None or int(ep_details.findtext('episode')) != self.episode): - logger.log(u'{id}: NFO has an block for a different episode - ' - u'wanted {ep_wanted} but got {ep_found}'.format - (id=self.show.indexerid, ep_wanted=episode_num(self.season, self.episode), - ep_found=episode_num(ep_details.findtext('season'), - ep_details.findtext('episode'))), logger.DEBUG) + logger.debug('{id}: NFO has an block for a different episode - ' + 'wanted {ep_wanted} but got {ep_found}', + id=self.show.indexerid, ep_wanted=episode_num(self.season, self.episode), + ep_found=episode_num(ep_details.findtext('season'), + ep_details.findtext('episode'))) continue if ep_details.findtext('title') is None or ep_details.findtext('aired') is None: @@ -674,9 +840,9 @@ def __load_from_nfo(self, location): if ep_details.findtext('aired'): raw_airdate = [int(x) for x in ep_details.findtext('aired').split('-')] - self.airdate = datetime.date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) + self.airdate = date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) else: - self.airdate = datetime.date.fromordinal(1) + self.airdate = date.fromordinal(1) self.hasnfo = True else: @@ -690,70 +856,73 @@ def __str__(self): :return: :rtype: unicode """ - result = u'' - result += u'%r - %r - %r\n' % (self.show.name, episode_num(self.season, self.episode), self.name) - result += u'location: %r\n' % self.location - result += u'description: %r\n' % self.description - result += u'subtitles: %r\n' % u','.join(self.subtitles) - result += u'subtitles_searchcount: %r\n' % self.subtitles_searchcount - result += u'subtitles_lastsearch: %r\n' % self.subtitles_lastsearch - result += u'airdate: %r (%r)\n' % (self.airdate.toordinal(), self.airdate) - result += u'hasnfo: %r\n' % self.hasnfo - result += u'hastbn: %r\n' % self.hastbn - result += u'status: %r\n' % self.status + result = '' + result += '%r - %r - %r\n' % (self.show.name, episode_num(self.season, self.episode), self.name) + result += 'location: %r\n' % self.location + result += 'description: %r\n' % self.description + result += 'subtitles: %r\n' % ','.join(self.subtitles) + result += 'subtitles_searchcount: %r\n' % self.subtitles_searchcount + result += 'subtitles_lastsearch: %r\n' % self.subtitles_lastsearch + result += 'airdate: %r (%r)\n' % (self.airdate.toordinal(), self.airdate) + result += 'hasnfo: %r\n' % self.hasnfo + result += 'hastbn: %r\n' % self.hastbn + result += 'status: %r\n' % self.status return result def to_json(self, detailed=True): """Return the json representation.""" - indexer_name = indexerConfig[self.indexer]['identifier'] - parsed_airdate = sbdatetime.convert_to_setting( - network_timezones.parse_date_time( - datetime.datetime.toordinal(self.airdate), - self.show.airs, - self.show.network - ) - ).isoformat('T') - data = OrderedDict([ - ('identifier', self.identifier), - ('id', OrderedDict([ - (indexer_name, self.indexerid), - ])), - ('season', self.season), - ('episode', self.episode), - ('absoluteNumber', self.absolute_number), - ('airDate', parsed_airdate), - ('title', self.name), - ('description', self.description), - ('hasNfo', self.hasnfo), - ('hasTbn', self.hastbn), - ('subtitles', self.subtitles), - ('status', 
statusStrings[Quality.split_composite_status(self.status).status]), - ('releaseName', self.release_name), - ('isProper', self.is_proper), - ('version', self.version), - ('scene', OrderedDict([ - ('season', self.scene_season), - ('episode', self.scene_episode), - ('absoluteNumber', self.scene_absolute_number), - ])), - ('location', self.location), - ('fileSize', self.file_size), - ]) + data = NonEmptyDict() + data['identifier'] = self.identifier + data['id'] = {self.indexer_name: self.indexerid} + data['season'] = self.season + data['episode'] = self.episode + + if self.absolute_number: + data['absoluteNumber'] = self.absolute_number + + data['airDate'] = self.air_date + data['title'] = self.name + data['description'] = self.description + data['content'] = [] + data['title'] = self.name + data['subtitles'] = self.subtitles + data['status'] = self.status_name + data['release'] = NonEmptyDict() + data['release']['name'] = self.release_name + data['release']['group'] = self.release_group + data['release']['proper'] = self.is_proper + data['release']['version'] = self.version + data['scene'] = NonEmptyDict() + data['scene']['season'] = self.scene_season + data['scene']['episode'] = self.scene_episode + + if self.scene_absolute_number: + data['scene']['absoluteNumber'] = self.scene_absolute_number + + data['file'] = NonEmptyDict() + data['file']['location'] = self.location + if self.file_size: + data['file']['size'] = self.file_size + + if self.hasnfo: + data['content'].append('NFO') + if self.hastbn: + data['content'].append('thumbnail') + if detailed: - data.update(OrderedDict([ - ('releaseGroup', self.release_group), - ('subtitlesSearchCount', self.subtitles_searchcount), - ('subtitlesLastSearched', self.subtitles_lastsearch), - ('wantedQualities', self.wanted_quality), - ('relatedEpisodes', [ep.identifier() for ep in self.related_episodes]), - ])) + data['statistics'] = NonEmptyDict() + data['statistics']['subtitleSearch'] = NonEmptyDict() + data['statistics']['subtitleSearch']['last'] = self.subtitles_lastsearch + data['statistics']['subtitleSearch']['count'] = self.subtitles_searchcount + data['wantedQualities'] = self.wanted_quality + data['wantedQualities'] = [ep.identifier() for ep in self.related_episodes] + return data def create_meta_files(self): """Create episode metadata files.""" if not self.show.is_location_valid(): - logger.log(u'{id}: The show dir is missing, unable to create metadata'.format(id=self.show.indexerid), - logger.WARNING) + logger.warning('{id}: The show dir is missing, unable to create metadata', id=self.show.indexerid) return for metadata_provider in app.metadata_provider_dict.values(): @@ -761,7 +930,7 @@ def create_meta_files(self): self.__create_thumbnail(metadata_provider) if self.check_for_meta_files(): - logger.log(u'{id}: Saving metadata changes to database'.format(id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Saving metadata changes to database', id=self.show.indexerid) self.save_to_db() def __create_nfo(self, metadata_provider): @@ -786,19 +955,19 @@ def __create_thumbnail(self, metadata_provider): def delete_episode(self): """Delete episode from database.""" - logger.log(u'{id}: Deleting {show} {ep} from the DB'.format - (id=self.show.indexerid, show=self.show.name, - ep=episode_num(self.season, self.episode)), logger.DEBUG) + logger.debug('{id}: Deleting {show} {ep} from the DB', + id=self.show.indexerid, show=self.show.name, + ep=episode_num(self.season, self.episode)) # remove myself from the show dictionary if 
self.show.get_episode(self.season, self.episode, no_create=True) == self: - logger.log(u"{id}: Removing myself from my show's list".format - (id=self.show.indexerid), logger.DEBUG) + logger.debug("{id}: Removing myself from my show's list", + id=self.show.indexerid) del self.show.episodes[self.season][self.episode] # delete myself from the DB - logger.log(u'{id}: Deleting myself from the database'.format - (id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Deleting myself from the database', + id=self.show.indexerid) main_db_con = db.DBConnection() sql = b'DELETE FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?' main_db_con.action(sql, [self.show.indexerid, self.season, self.episode]) @@ -809,8 +978,8 @@ def get_sql(self): """Create SQL queue for this episode if any of its data has been changed since the last save.""" try: if not self.dirty: - logger.log(u'{id}: Not creating SQL queue - record is not dirty'.format - (id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Not creating SQL queue - record is not dirty', + id=self.show.indexerid) return main_db_con = db.DBConnection() @@ -938,40 +1107,40 @@ def get_sql(self): self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number, self.version, self.release_group]] except Exception as e: - logger.log(u'{id}: Error while updating database: {error_msg}'.format - (id=self.show.indexerid, error_msg=repr(e)), logger.ERROR) + logger.error('{id}: Error while updating database: {error_msg}', id=self.show.indexerid, error_msg=repr(e)) def save_to_db(self): """Save this episode to the database if any of its data has been changed since the last save.""" if not self.dirty: return - new_value_dict = {'indexerid': self.indexerid, - 'indexer': self.indexer, - 'name': self.name, - 'description': self.description, - 'subtitles': ','.join(self.subtitles), - 'subtitles_searchcount': self.subtitles_searchcount, - 'subtitles_lastsearch': self.subtitles_lastsearch, - 'airdate': self.airdate.toordinal(), - 'hasnfo': self.hasnfo, - 'hastbn': self.hastbn, - 'status': self.status, - 'location': self.location, - 'file_size': self.file_size, - 'release_name': self.release_name, - 'is_proper': self.is_proper, - 'absolute_number': self.absolute_number, - 'version': self.version, - 'release_group': self.release_group} - - control_value_dict = {'showid': self.show.indexerid, - 'season': self.season, - 'episode': self.episode} + new_value_dict = {b'indexerid': self.indexerid, + b'indexer': self.indexer, + b'name': self.name, + b'description': self.description, + b'subtitles': ','.join(self.subtitles), + b'subtitles_searchcount': self.subtitles_searchcount, + b'subtitles_lastsearch': self.subtitles_lastsearch, + b'airdate': self.airdate.toordinal(), + b'hasnfo': self.hasnfo, + b'hastbn': self.hastbn, + b'status': self.status, + b'location': self.location, + b'file_size': self.file_size, + b'release_name': self.release_name, + b'is_proper': self.is_proper, + b'absolute_number': self.absolute_number, + b'version': self.version, + b'release_group': self.release_group, + b'manually_searched': self.manually_searched} + + control_value_dict = {b'showid': self.show.indexerid, + b'season': self.season, + b'episode': self.episode} # use a custom update/insert method to get the data into the DB main_db_con = db.DBConnection() - main_db_con.upsert('tv_episodes', new_value_dict, control_value_dict) + main_db_con.upsert(b'tv_episodes', new_value_dict, control_value_dict) self.loaded = False self.reset_dirty() 
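The save_to_db change just above hands a values dict and a control (key) dict to DBConnection.upsert. Roughly, an upsert of that shape tries an UPDATE keyed on the control columns and falls back to an INSERT when no row was touched. A rough standalone sqlite3 sketch of that pattern only; the table layout, column names and sample ids are illustrative, not Medusa's schema or its DBConnection implementation:

import sqlite3


def upsert(conn, table, new_values, control_values):
    """Update the row matching control_values; insert a fresh row if none exists yet."""
    set_clause = ', '.join('{0} = ?'.format(k) for k in new_values)
    where_clause = ' AND '.join('{0} = ?'.format(k) for k in control_values)
    cur = conn.execute(
        'UPDATE {0} SET {1} WHERE {2}'.format(table, set_clause, where_clause),
        list(new_values.values()) + list(control_values.values()))
    if cur.rowcount <= 0:  # nothing matched: insert instead
        columns = dict(new_values, **control_values)
        conn.execute(
            'INSERT INTO {0} ({1}) VALUES ({2})'.format(
                table, ', '.join(columns), ', '.join('?' * len(columns))),
            list(columns.values()))


conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (showid INT, season INT, episode INT, status INT)')
upsert(conn, 'tv_episodes', {'status': 3}, {'showid': 70336, 'season': 1, 'episode': 2})
upsert(conn, 'tv_episodes', {'status': 4}, {'showid': 70336, 'season': 1, 'episode': 2})
print(conn.execute('SELECT * FROM tv_episodes').fetchall())  # one row, with status 4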
@@ -1071,7 +1240,7 @@ def release_group(show, name): try: parse_result = NameParser(show=show, naming_pattern=True).parse(name) except (InvalidNameException, InvalidShowException) as e: - logger.log(u'Unable to parse release_group: {error_msg}'.format(error_msg=ex(e)), logger.DEBUG) + logger.debug('Unable to parse release_group: {error_msg}', error_msg=ex(e)) return '' if not parse_result.release_group: @@ -1115,7 +1284,7 @@ def release_group(show, name): # try to get the release encoder to comply with scene naming standards encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ''), ep_qual) if encoder: - logger.log(u'Found codec for {show} {ep}'.format(show=show_name, ep=ep_name), logger.DEBUG) + logger.debug('Found codec for {show} {ep}', show=show_name, ep=ep_name) return { '%SN': show_name, @@ -1150,9 +1319,9 @@ def release_group(show, name): '%Y': str(self.airdate.year), '%M': str(self.airdate.month), '%D': str(self.airdate.day), - '%CY': str(datetime.date.today().year), - '%CM': str(datetime.date.today().month), - '%CD': str(datetime.date.today().day), + '%CY': str(date.today().year), + '%CM': str(date.today().month), + '%CD': str(date.today().day), '%0M': '%02d' % self.airdate.month, '%0D': '%02d' % self.airdate.day, '%RT': 'PROPER' if self.is_proper else '', @@ -1210,8 +1379,8 @@ def _format_pattern(self, pattern=None, multi=None, anime_type=None): # if there's no release group in the db, let the user know we replaced it if replace_map['%RG'] and replace_map['%RG'] != app.UNKNOWN_RELEASE_GROUP: if not hasattr(self, 'release_group') or not self.release_group: - logger.log(u'{id}: Episode has no release group, replacing it with {rg}'.format - (id=self.show.indexerid, rg=replace_map['%RG']), logger.DEBUG) + logger.debug('{id}: Episode has no release group, replacing it with {rg}', + id=self.show.indexerid, rg=replace_map['%RG']) self.release_group = replace_map['%RG'] # if release_group is not in the db, put it there # if there's no release name then replace it with a reasonable facsimile @@ -1342,8 +1511,8 @@ def _format_pattern(self, pattern=None, multi=None, anime_type=None): result_name = self.__format_string(result_name, replace_map) - logger.log(u'{id}: Formatting pattern: {pattern} -> {result_name}'.format - (id=self.show.indexerid, pattern=pattern, result_name=result_name), logger.DEBUG) + logger.debug('{id}: Formatting pattern: {pattern} -> {result_name}', + id=self.show.indexerid, pattern=pattern, result_name=result_name) return result_name @@ -1429,8 +1598,8 @@ def formatted_filename(self, pattern=None, multi=None, anime_type=None): def rename(self): """Rename an episode file and all related files to the location and filename as specified in naming settings.""" if not self.is_location_valid(): - logger.log(u"{id} Can't perform rename on {location} when it doesn't exist, skipping".format - (id=self.indexerid, location=self.location), logger.WARNING) + logger.warning("{id} Can't perform rename on {location} when it doesn't exist, skipping", + id=self.indexerid, location=self.location) return proper_path = self.proper_path() @@ -1445,13 +1614,13 @@ def rename(self): if absolute_current_path_no_ext.startswith(self.show.location): current_path = absolute_current_path_no_ext[len(self.show.location):] - logger.log(u'{id}: Renaming/moving episode from the base path {location} to {new_location}'.format - (id=self.indexerid, location=self.location, new_location=absolute_proper_path), logger.DEBUG) + logger.debug('{id}: Renaming/moving episode from the 
base path {location} to {new_location}', + id=self.indexerid, location=self.location, new_location=absolute_proper_path) # if it's already named correctly then don't do anything if proper_path == current_path: - logger.log(u'{id}: File {location} is already named correctly, skipping'.format - (id=self.indexerid, location=self.location), logger.DEBUG) + logger.debug('{id}: File {location} is already named correctly, skipping', + id=self.indexerid, location=self.location) return related_files = post_processor.PostProcessor(self.location).list_associated_files( @@ -1462,8 +1631,8 @@ def rename(self): related_subs = post_processor.PostProcessor( self.location).list_associated_files(app.SUBTITLES_DIR, subtitles_only=True, subfolders=True) - logger.log(u'{id} Files associated to {location}: {related_files}'.format - (id=self.indexerid, location=self.location, related_files=related_files), logger.DEBUG) + logger.debug('{id} Files associated to {location}: {related_files}', + id=self.indexerid, location=self.location, related_files=related_files) # move the ep file result = helpers.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length) @@ -1482,16 +1651,16 @@ def rename(self): cur_result = helpers.rename_ep_file(cur_related_file, proper_related_path, absolute_current_path_no_ext_length + len(subfolder)) if not cur_result: - logger.log(u'{id}: Unable to rename file {cur_file}'.format - (id=self.indexerid, cur_file=cur_related_file), logger.WARNING) + logger.warning('{id}: Unable to rename file {cur_file}', + id=self.indexerid, cur_file=cur_related_file) for cur_related_sub in related_subs: absolute_proper_subs_path = os.path.join(app.SUBTITLES_DIR, self.formatted_filename()) cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path, absolute_current_path_no_ext_length) if not cur_result: - logger.log(u'{id}: Unable to rename file {cur_file}'.format - (id=self.indexerid, cur_file=cur_related_sub), logger.WARNING) + logger.warning('{id}: Unable to rename file {cur_file}', + id=self.indexerid, cur_file=cur_related_sub) # save the ep with self.lock: @@ -1533,27 +1702,27 @@ def airdate_modify_stamp(self): if app.FILE_TIMESTAMP_TIMEZONE == 'local': airdatetime = airdatetime.astimezone(network_timezones.app_timezone) - filemtime = datetime.datetime.fromtimestamp( + filemtime = datetime.fromtimestamp( os.path.getmtime(self.location)).replace(tzinfo=network_timezones.app_timezone) if filemtime != airdatetime: airdatetime = airdatetime.timetuple() - logger.log(u"{id}: About to modify date of '{location}' to show air date {air_date}".format - (id=self.show.indexerid, location=self.location, - air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)), logger.DEBUG) + logger.debug("{id}: About to modify date of '{location}' to show air date {air_date}", + id=self.show.indexerid, location=self.location, + air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)) try: if helpers.touch_file(self.location, time.mktime(airdatetime)): - logger.log(u"{id}: Changed modify date of '{location}' to show air date {air_date}".format - (id=self.show.indexerid, location=os.path.basename(self.location), - air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime))) + logger.info("{id}: Changed modify date of '{location}' to show air date {air_date}", + id=self.show.indexerid, location=os.path.basename(self.location), + air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)) else: - logger.log(u"{id}: Unable to modify date of '{location}' to show air date {air_date}".format - 
(id=self.show.indexerid, location=os.path.basename(self.location),
-                                air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)), logger.WARNING)
+                logger.warning("{id}: Failed to modify date of '{location}' to show air date {air_date}",
+                               id=self.show.indexerid, location=os.path.basename(self.location),
+                               air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime))
         except Exception:
-            logger.log(u"{id}: Failed to modify date of '{location}'".format
-                       (id=self.show.indexerid, location=os.path.basename(self.location)), logger.WARNING)
+            logger.warning("{id}: Failed to modify date of '{location}'",
+                           id=self.show.indexerid, location=os.path.basename(self.location))
diff --git a/medusa/tv/indexer.py b/medusa/tv/indexer.py
new file mode 100644
index 0000000000..d55c5657bf
--- /dev/null
+++ b/medusa/tv/indexer.py
@@ -0,0 +1,54 @@
+# coding=utf-8
+"""Indexer class."""
+
+from medusa.indexers.indexer_config import indexer_id_to_name, indexer_name_to_id
+from medusa.tv.base import Identifier
+
+
+class Indexer(Identifier):
+    """Represent an Indexer with id and slug name."""
+
+    def __init__(self, identifier):
+        """Constructor.
+
+        :param identifier:
+        :type identifier: int
+        """
+        self.id = identifier
+
+    @classmethod
+    def from_slug(cls, slug):
+        """Create Indexer from slug."""
+        identifier = indexer_name_to_id(slug)
+        if identifier is not None:
+            return Indexer(identifier)
+
+    @classmethod
+    def from_id(cls, pk):
+        """Create Indexer from id."""
+        return Indexer(pk)
+
+    @property
+    def slug(self):
+        """Slug name."""
+        return indexer_id_to_name(self.id)
+
+    def __nonzero__(self):
+        """Magic method bool."""
+        return self.id is not None
+
+    def __repr__(self):
+        """Magic method."""
+        return '<Indexer [{0}:{1}]>'.format(self.slug, self.id)
+
+    def __str__(self):
+        """Magic method."""
+        return str(self.slug)
+
+    def __hash__(self):
+        """Magic method."""
+        return hash(self.id)
+
+    def __eq__(self, other):
+        """Magic method."""
+        return isinstance(other, Indexer) and self.id == other.id
diff --git a/medusa/tv/series.py b/medusa/tv/series.py
index c02a0841ef..38ee37e969 100644
--- a/medusa/tv/series.py
+++ b/medusa/tv/series.py
@@ -1,16 +1,34 @@
 # coding=utf-8
-"""Series and Episode classes."""
+# Author: Nic Wolfe <nic@wolfeden.ca>
+#
+# This file is part of Medusa.
+#
+# Medusa is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Medusa is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Medusa. If not, see <http://www.gnu.org/licenses/>.
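The new medusa/tv/indexer.py above wraps a numeric indexer id in an Identifier that round-trips to a slug via indexer_name_to_id / indexer_id_to_name, and combined slugs like tvdb1234 are split again further down in series.py. A tiny self-contained sketch of that round-trip; the id-to-slug table and the split_series_slug helper are illustrative guesses, not the real indexer_config mapping or slug_to_indexer_id:

import re

# Illustrative id<->slug table; Medusa resolves these through indexer_config helpers.
ID_TO_SLUG = {1: 'tvdb', 3: 'tvmaze', 4: 'tmdb'}
SLUG_TO_ID = {slug: pk for pk, slug in ID_TO_SLUG.items()}


def indexer_from_slug(slug):
    """Return the numeric indexer id for a slug such as 'tvdb', or None."""
    return SLUG_TO_ID.get(slug)


def split_series_slug(slug):
    """Split a combined series slug such as 'tvdb301824' into (indexer_id, series_id)."""
    match = re.match(r'([a-z]+)(\d+)$', slug)
    if match and match.group(1) in SLUG_TO_ID:
        return SLUG_TO_ID[match.group(1)], int(match.group(2))


print(indexer_from_slug('tvdb'))        # 1
print(split_series_slug('tvdb301824'))  # (1, 301824)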
+"""Series classes.""" + +from __future__ import unicode_literals import copy import datetime import glob +import logging import os.path import shutil import stat import traceback import warnings from collections import ( - OrderedDict, namedtuple, ) from itertools import groupby @@ -22,7 +40,6 @@ db, helpers, image_cache, - logger, network_timezones, notifiers, post_processor, @@ -43,15 +60,18 @@ UNAIRED, UNKNOWN, WANTED, + countryList, qualityPresets, statusStrings, ) +from medusa.helper.collections import NonEmptyDict from medusa.helper.common import ( episode_num, pretty_file_size, try_int, ) from medusa.helper.exceptions import ( + CantRemoveShowException, EpisodeDeletedException, EpisodeNotFoundException, MultipleShowObjectsException, @@ -60,19 +80,25 @@ ShowNotFoundException, ex, ) -from medusa.helper.externals import get_externals +from medusa.helpers.externals import get_externals +from medusa.image_cache import ImageCache from medusa.indexers.indexer_api import indexerApi from medusa.indexers.indexer_config import ( INDEXER_TVRAGE, indexerConfig, + indexer_id_to_slug, mappings, reverse_mappings, -) + slug_to_indexer_id) from medusa.indexers.indexer_exceptions import ( IndexerAttributeNotFound, IndexerException, IndexerSeasonNotFound, ) +from medusa.media.banner import ShowBanner +from medusa.media.fan_art import ShowFanArt +from medusa.media.network_logo import ShowNetworkLogo +from medusa.media.poster import ShowPoster from medusa.name_parser.parser import ( InvalidNameException, InvalidShowException, @@ -81,10 +107,9 @@ from medusa.sbdatetime import sbdatetime from medusa.scene_exceptions import get_scene_exceptions from medusa.show.show import Show -from medusa.tv.base import TV +from medusa.tv.base import Identifier, TV from medusa.tv.episode import Episode - -import shutil_custom +from medusa.tv.indexer import Indexer from six import text_type @@ -94,10 +119,70 @@ app.TRASH_REMOVE_SHOW = 0 -shutil.copyfile = shutil_custom.copyfile_custom - MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() +logger = logging.getLogger(__name__) + + +class SeriesIdentifier(Identifier): + """Series identifier with indexer and indexer id.""" + + def __init__(self, indexer, identifier): + """Constructor. + + :param indexer: + :type indexer: Indexer or int + :param identifier: + :type identifier: int + """ + self.indexer = indexer if isinstance(indexer, Indexer) else Indexer.from_id(indexer) + self.id = identifier + + @classmethod + def from_slug(cls, slug): + """Create SeriesIdentifier from slug. 
E.g.: tvdb1234.""" + result = slug_to_indexer_id(slug) + if result is not None: + indexer, indexer_id = result + if indexer is not None and indexer_id is not None: + return SeriesIdentifier(Indexer(indexer), indexer_id) + + @classmethod + def from_id(cls, indexer, indexer_id): + """Create SeriesIdentifier from tuple (indexer, indexer_id).""" + return SeriesIdentifier(indexer, indexer_id) + + @property + def slug(self): + """Slug.""" + return str(self) + + @property + def api(self): + """Api.""" + indexer_api = indexerApi(self.indexer.id) + return indexer_api.indexer(**indexer_api.api_params) + + def __nonzero__(self): + """Magic method.""" + return self.indexer is not None and self.id is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.indexer, self.id) + + def __str__(self): + """Magic method.""" + return '{0}{1}'.format(self.indexer, self.id) + + def __hash__(self): + """Magic method.""" + return hash((self.indexer, self.id)) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, SeriesIdentifier) and self.indexer == other.indexer and self.id == other.id + class Series(TV): """Represent a TV Show.""" @@ -113,7 +198,8 @@ def __init__(self, indexer, indexerid, lang='', quality=None, :param lang: :type lang: str """ - super(Series, self).__init__(indexer, indexerid, {'episodes', 'nextaired', 'release_groups'}) + super(Series, self).__init__(indexer, indexerid, {'episodes', 'next_aired', 'release_groups', 'exceptions', + 'external', 'imdb_info'}) self.name = '' self.imdb_id = '' self.network = '' @@ -124,7 +210,7 @@ def __init__(self, indexer, indexerid, lang='', quality=None, self.quality = quality or int(app.QUALITY_DEFAULT) self.flatten_folders = flatten_folders or int(app.FLATTEN_FOLDERS_DEFAULT) self.status = 'Unknown' - self.airs = '' + self._airs = '' self.start_year = 0 self.paused = 0 self.air_by_date = 0 @@ -153,6 +239,56 @@ def __init__(self, indexer, indexerid, lang='', quality=None, self._load_from_db() + @classmethod + def find_series(cls, predicate=None): + """Find series based on given predicate.""" + return [s for s in app.showList if s and (not predicate or predicate(s))] + + @classmethod + def find_by_identifier(cls, identifier, predicate=None): + """Find series by its identifier and predicate. 
+ + :param identifier: + :type identifier: medusa.tv.series.SeriesIdentifier + :param predicate: + :type predicate: callable + :return: + :rtype: + """ + result = Show.find(app.showList, identifier.id, identifier.indexer.id) + if result and (not predicate or predicate(result)): + return result + + @classmethod + def from_identifier(cls, identifier): + """Create a series object from its identifier.""" + return Series(identifier.indexer.id, identifier.id) + + # TODO: Make this the single entry to add new series + @classmethod + def save_series(cls, series): + """Save the specified series to medusa.""" + try: + api = series.identifier.api + series.load_from_indexer(tvapi=api) + series.load_imdb_info() + app.showList.append(series) + series.save_to_db() + series.load_episodes_from_indexer(tvapi=api) + return series + except IndexerException as e: + logger.warning('Unable to load series from indexer: {0!r}'.format(e)) + + @property + def identifier(self): + """Identifier.""" + return SeriesIdentifier(self.indexer, self.indexerid) + + @property + def slug(self): + """Slug.""" + return self.identifier.slug + @property def indexer_api(self): """Get an Indexer API instance.""" @@ -172,10 +308,7 @@ def create_indexer(self, banners=False, actors=False, dvd_order=False, episodes= if self.lang: params[b'language'] = self.lang - logger.log( - u'{id}: Using language from show settings: {lang}'.format - (id=self.indexerid, lang=self.lang), logger.DEBUG - ) + logger.debug(u'{id}: Using language from show settings: {lang}', id=self.indexerid, lang=self.lang) if self.dvd_order != 0 or dvd_order: params[b'dvdorder'] = True @@ -248,18 +381,16 @@ def indexer_name(self): @property def indexer_slug(self): - """Return the slug name of the show. Example: tvdb1234.""" - return '{name}{indexerid}'.format(name=self.indexer_name, indexerid=self.indexerid) + """Return the slug name of the series. 
Example: tvdb1234.""" + return indexer_id_to_slug(self.indexer, self.indexerid) @location.setter def location(self, value): - logger.log( - u'{indexer} {id}: Setting location: {location}'.format( - indexer=indexerApi(self.indexer).name, - id=self.indexerid, - location=value - ), - logger.DEBUG + logger.debug( + u'{indexer} {id}: Setting location: {location}', + indexer=indexerApi(self.indexer).name, + id=self.indexerid, + location=value ) # Don't validate dir if user wants to add shows without creating a dir if app.ADD_SHOWS_WO_DIR or self.is_location_valid(value): @@ -309,6 +440,114 @@ def subtitle_flag(self): """Subtitle flag.""" return subtitles.code_from_code(self.lang) if self.lang else '' + @property + def show_type(self): + """Return show type.""" + return 'sports' if self.is_sports else ('anime' if self.is_anime else 'series') + + @property + def imdb_year(self): + """Return series year.""" + return self.imdb_info.get('year') + + @property + def imdb_runtime(self): + """Return series runtime.""" + return self.imdb_info.get('runtimes') + + @property + def imdb_akas(self): + """Return genres akas dict.""" + akas = {} + for x in [v for v in self.imdb_info.get('akas', '').split('|') if v]: + if '::' in x: + val, key = x.split('::') + akas[key] = val + return akas + + @property + def imdb_countries(self): + """Return country codes.""" + return [v for v in self.imdb_info.get('country_codes', '').split('|') if v] + + @property + def imdb_plot(self): + """Return series plot.""" + return self.imdb_info.get('plot', '') + + @property + def imdb_genres(self): + """Return series genres.""" + return self.imdb_info.get('genres', '') + + @property + def imdb_votes(self): + """Return series votes.""" + return self.imdb_info.get('votes') + + @property + def imdb_rating(self): + """Return series rating.""" + return self.imdb_info.get('rating') + + @property + def imdb_certificates(self): + """Return series certificates.""" + return self.imdb_info.get('certificates') + + @property + def next_airdate(self): + """Return next airdate.""" + return ( + sbdatetime.convert_to_setting(network_timezones.parse_date_time(self.next_aired, self.airs, self.network)) + if try_int(self.next_aired, 1) > MILLIS_YEAR_1900 else None + ) + + @property + def genres(self): + """Return genres list.""" + return list({i for i in (self.genre or '').split('|') if i} | + {i for i in self.imdb_genres.replace('Sci-Fi', 'Science-Fiction').split('|') if i}) + + @property + def airs(self): + """Return episode time that series usually airs.""" + return self._airs + + @airs.setter + def airs(self, value): + """Set episode time that series usually airs.""" + self._airs = text_type(value).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ').strip() + + @property + def poster(self): + """Return poster path.""" + poster = ImageCache.poster_path(self.indexerid) + if os.path.isfile(poster): + return poster + + @property + def banner(self): + """Return banner path.""" + banner = ImageCache.banner_path(self.indexerid) + if os.path.isfile(banner): + return banner + + @property + def aliases(self): + """Return series aliases.""" + return self.exceptions or get_scene_exceptions(self.indexerid, self.indexer) + + @property + def release_ignore_words(self): + """Return release ignore words.""" + return [v for v in (self.rls_ignore_words or '').split(',') if v] + + @property + def release_required_words(self): + """Return release ignore words.""" + return [v for v in (self.rls_require_words or '').split(',') if v] + def flush_episodes(self): 
"""Delete references to anything that's not in the internal lists.""" for cur_season in self.episodes: @@ -331,7 +570,7 @@ def get_all_seasons(self, last_airdate=False): main_db_con = db.DBConnection() results = main_db_con.select(sql_selection, [self.indexerid]) - return {int(x['season']): int(x['number_of_episodes']) for x in results} + return {int(x[b'season']): int(x[b'number_of_episodes']) for x in results} def get_all_episodes(self, season=None, has_location=False): """Retrieve all episodes for this show given the specified filter. @@ -437,28 +676,26 @@ def get_episode(self, season=None, episode=None, filepath=None, no_create=False, b'FROM tv_episodes ' \ b'WHERE showid = ? AND absolute_number = ? AND season != 0' sql_args = [self.indexerid, absolute_number] - logger.log(u'{id}: Season and episode lookup for {show} using absolute number {absolute}'. - format(id=self.indexerid, absolute=absolute_number, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Season and episode lookup for {show} using absolute number {absolute}', + id=self.indexerid, absolute=absolute_number, show=self.name) elif air_date: sql = b'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?' sql_args = [self.indexerid, air_date.toordinal()] - logger.log(u'{id}: Season and episode lookup for {show} using air date {air_date}'. - format(id=self.indexerid, air_date=air_date, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Season and episode lookup for {show} using air date {air_date}', + id=self.indexerid, air_date=air_date, show=self.name) sql_results = main_db_con.select(sql, sql_args) if sql else [] if len(sql_results) == 1: episode = int(sql_results[0][b'episode']) season = int(sql_results[0][b'season']) - logger.log(u'{id}: Found season and episode which is {show} {ep}'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode)), logger.DEBUG) + logger.debug(u'{id}: Found season and episode which is {show} {ep}', + id=self.indexerid, show=self.name, ep=episode_num(season, episode)) elif len(sql_results) > 1: - logger.log(u'{id}: Multiple entries found in show: {show} '.format - (id=self.indexerid, show=self.name), logger.ERROR) + logger.error(u'{id}: Multiple entries found in show: {show} ', id=self.indexerid, show=self.name) return None else: - logger.log(u'{id}: No entries found in show: {show}'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: No entries found in show: {show}', id=self.indexerid, show=self.name) return None if season not in self.episodes: @@ -474,7 +711,7 @@ def get_episode(self, season=None, episode=None, filepath=None, no_create=False, else: ep = Episode(self, season, episode) - if ep is not None and should_cache: + if ep is not None and ep.loaded and should_cache: self.episodes[season][episode] = ep return ep @@ -489,8 +726,8 @@ def should_update(self, update_date=datetime.date.today()): """ # if show is 'paused' do not update_date if self.paused: - logger.log(u'{id}: Show {show} is paused. Update skipped'.format - (id=self.indexerid, show=self.name), logger.INFO) + logger.info(u'{id}: Show {show} is paused. 
Update skipped', + id=self.indexerid, show=self.name) return False # if show is not 'Ended' always update (status 'Continuing') @@ -588,8 +825,7 @@ def write_metadata(self, show_only=False): :type show_only: bool """ if not self.is_location_valid(): - logger.log(u"{id}: Show dir doesn't exist, skipping NFO generation".format(id=self.indexerid), - logger.WARNING) + logger.warning(u"{id}: Show dir doesn't exist, skipping NFO generation", id=self.indexerid) return for metadata_provider in app.metadata_provider_dict.values(): @@ -601,7 +837,7 @@ def write_metadata(self, show_only=False): def __write_episode_nfos(self): - logger.log(u"{id}: Writing NFOs for all episodes".format(id=self.indexerid), logger.DEBUG) + logger.debug(u"{id}: Writing NFOs for all episodes", id=self.indexerid) main_db_con = db.DBConnection() sql_results = main_db_con.select( @@ -615,9 +851,8 @@ def __write_episode_nfos(self): b" AND location != ''", [self.indexerid]) for ep_result in sql_results: - logger.log(u'{id}: Retrieving/creating episode {ep}'.format - (id=self.indexerid, ep=episode_num(ep_result[b'season'], ep_result[b'episode'])), - logger.DEBUG) + logger.debug(u'{id}: Retrieving/creating episode {ep}', + id=self.indexerid, ep=episode_num(ep_result[b'season'], ep_result[b'episode'])) cur_ep = self.get_episode(ep_result[b'season'], ep_result[b'episode']) if not cur_ep: continue @@ -627,8 +862,7 @@ def __write_episode_nfos(self): def update_metadata(self): """Update show metadata files.""" if not self.is_location_valid(): - logger.log(u"{id}: Show dir doesn't exist, skipping NFO generation".format(id=self.indexerid), - logger.WARNING) + logger.warning(u"{id}: Show dir doesn't exist, skipping NFO generation", id=self.indexerid) return self.__update_show_nfo() @@ -637,7 +871,7 @@ def __update_show_nfo(self): result = False - logger.log(u"{id}: Updating NFOs for show with new indexer info".format(id=self.indexerid), logger.INFO) + logger.info(u"{id}: Updating NFOs for show with new indexer info", id=self.indexerid) # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute # is reset. This will prevent errors, when using multiple indexers and caching. 
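A note on the call style used throughout these hunks: the converted calls such as `logger.debug(u'{id}: ...', id=...)` pass the placeholders as keyword arguments instead of pre-formatting the message, which only works if the project's logger wraps stdlib logging with a brace-formatting layer. The following is a minimal illustrative sketch of such a wrapper, assuming plain `logging` underneath; it is not Medusa's actual `medusa.logger` implementation, and the module-level `debug()` helper and the sample id are made up for the example.

    import logging

    logger = logging.getLogger(__name__)


    class BraceMessage(object):
        """Defer str.format() until a handler actually renders the record."""

        def __init__(self, fmt, **kwargs):
            self.fmt = fmt
            self.kwargs = kwargs

        def __str__(self):
            return self.fmt.format(**self.kwargs)


    def debug(msg, **kwargs):
        """Keyword-placeholder variant of logger.debug(), as used in this diff."""
        if logger.isEnabledFor(logging.DEBUG):
            # logging calls str() on the message lazily, so formatting only
            # happens when the DEBUG level is actually enabled.
            logger.debug(BraceMessage(msg, **kwargs))


    debug(u'{id}: Updating NFOs for show with new indexer info', id=12345)

The same pattern extends naturally to `info`, `warning`, and `error`, which is why the diff can drop the explicit `logger.DEBUG` / `logger.WARNING` level arguments from the old `logger.log(...)` calls.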
for cur_provider in app.metadata_provider_dict.values(): @@ -648,34 +882,31 @@ def __update_show_nfo(self): def load_episodes_from_dir(self): """Find all media files in the show folder and create episodes for as many as possible.""" if not self.is_location_valid(): - logger.log(u"{id}: Show dir doesn't exist, not loading episodes from disk".format(id=self.indexerid), - logger.WARNING) + logger.warning(u"{id}: Show dir doesn't exist, not loading episodes from disk", id=self.indexerid) return - logger.log(u"{id}: Loading all episodes from the show directory: {location}".format - (id=self.indexerid, location=self.location), logger.DEBUG) + logger.debug(u"{id}: Loading all episodes from the show directory: {location}", + id=self.indexerid, location=self.location) # get file list media_files = helpers.list_media_files(self.location) - logger.log(u'{id}: Found files: {media_files}'.format - (id=self.indexerid, media_files=media_files), logger.DEBUG) + logger.debug(u'{id}: Found files: {media_files}', id=self.indexerid, media_files=media_files) # create TVEpisodes from each media file (if possible) sql_l = [] for media_file in media_files: cur_episode = None - logger.log(u"{id}: Creating episode from: {location}".format - (id=self.indexerid, location=media_file), logger.DEBUG) + logger.debug(u"{id}: Creating episode from: {location}", id=self.indexerid, location=media_file) try: cur_episode = self.make_ep_from_file(os.path.join(self.location, media_file)) except (ShowNotFoundException, EpisodeNotFoundException) as e: - logger.log(u"{id}: Episode {location} returned an exception {error_msg}".format - (id=self.indexerid, location=media_file, error_msg=ex(e)), logger.WARNING) + logger.warning(u"{id}: Episode {location} returned an exception {error_msg}", + id=self.indexerid, location=media_file, error_msg=ex(e)) continue except EpisodeDeletedException: - logger.log(u'{id}: The episode deleted itself when I tried making an object for it'.format - (id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: The episode deleted itself when I tried making an object for it', + id=self.indexerid) if cur_episode is None: continue @@ -690,8 +921,8 @@ def load_episodes_from_dir(self): parse_result = None if ' ' not in ep_file_name and parse_result and parse_result.release_group: - logger.log(u'{id}: Filename {file_name} gave release group of {rg}, seems valid'.format - (id=self.indexerid, file_name=ep_file_name, rg=parse_result.release_group), logger.DEBUG) + logger.debug(u'{id}: Filename {file_name} gave release group of {rg}, seems valid', + id=self.indexerid, file_name=ep_file_name, rg=parse_result.release_group) cur_episode.release_name = ep_file_name # store the reference in the show @@ -699,9 +930,9 @@ def load_episodes_from_dir(self): if self.subtitles: try: cur_episode.refresh_subtitles() - except Exception: - logger.log(u'{id}: Could not refresh subtitles'.format(id=self.indexerid), logger.ERROR) - logger.log(traceback.format_exc(), logger.DEBUG) + except OSError: + logger.info(u'{id}: Could not refresh subtitles', id=self.indexerid) + logger.debug(traceback.format_exc()) sql_l.append(cur_episode.get_sql()) @@ -731,15 +962,14 @@ def load_episodes_from_db(self, seasons=None): if seasons: sql += b' AND season IN (%s)' % ','.join('?' 
* len(seasons)) sql_results = main_db_con.select(sql, [self.indexerid] + seasons) - logger.log(u'{id}: Loading all episodes of season(s) {seasons} from the DB'.format - (id=self.indexerid, seasons=seasons), logger.DEBUG) + logger.debug(u'{id}: Loading all episodes of season(s) {seasons} from the DB', + id=self.indexerid, seasons=seasons) else: sql_results = main_db_con.select(sql, [self.indexerid]) - logger.log(u'{id}: Loading all episodes of all seasons from the DB'.format - (id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Loading all episodes of all seasons from the DB', id=self.indexerid) except Exception as error: - logger.log(u'{id}: Could not load episodes from the DB. Error: {error_msg}'.format - (id=self.indexerid, error_msg=error), logger.ERROR) + logger.error(u'{id}: Could not load episodes from the DB. Error: {error_msg}', + id=self.indexerid, error_msg=error) return scanned_eps cached_show = self.indexer_api[self.indexerid] @@ -757,28 +987,26 @@ def load_episodes_from_db(self, seasons=None): delete_ep = False - logger.log(u'{id}: Loading {show} {ep} from the DB'.format - (id=cur_show_id, show=cur_show_name, ep=episode_num(cur_season, cur_episode)), - logger.DEBUG) + logger.debug(u'{id}: Loading {show} {ep} from the DB', + id=cur_show_id, show=cur_show_name, ep=episode_num(cur_season, cur_episode)) if cur_season not in cached_seasons: try: cached_seasons[cur_season] = cached_show[cur_season] except IndexerSeasonNotFound as error: - logger.log(u'{id}: {error_msg} (unaired/deleted) in the indexer {indexer} for {show}. ' - u'Removing existing records from database'.format - (id=cur_show_id, error_msg=error.message, indexer=indexerApi(self.indexer).name, - show=cur_show_name), logger.DEBUG) + logger.debug(u'{id}: {error_msg} (unaired/deleted) in the indexer {indexer} for {show}. ' + u'Removing existing records from database', + id=cur_show_id, error_msg=error.message, indexer=indexerApi(self.indexer).name, + show=cur_show_name) delete_ep = True if cur_season not in scanned_eps: scanned_eps[cur_season] = {} if cur_episode == 0: - logger.log(u'{id}: Tried loading {show} {ep} from the DB. With an episode id set to 0.' - u' We dont support that. Skipping to next episode.'. - format(id=cur_show_id, show=cur_show_name, - ep=episode_num(cur_season, cur_episode)), logger.WARNING) + logger.warning(u'{id}: Tried loading {show} {ep} from the DB. With an episode id set to 0.' + u' We dont support that. 
Skipping to next episode.', + id=cur_show_id, show=cur_show_name, ep=episode_num(cur_season, cur_episode)) continue try: @@ -793,13 +1021,12 @@ def load_episodes_from_db(self, seasons=None): cur_ep.load_from_db(cur_season, cur_episode) scanned_eps[cur_season][cur_episode] = True except EpisodeDeletedException: - logger.log(u'{id}: Tried loading {show} {ep} from the DB that should have been deleted, ' - u'skipping it'.format(id=cur_show_id, show=cur_show_name, - ep=episode_num(cur_season, cur_episode)), logger.DEBUG) + logger.debug(u'{id}: Tried loading {show} {ep} from the DB that should have been deleted, ' + u'skipping it', id=cur_show_id, show=cur_show_name, + ep=episode_num(cur_season, cur_episode)) continue - logger.log(u'{id}: Finished loading all episodes for {show} from the DB'.format - (show=cur_show_name, id=cur_show_id), logger.DEBUG) + logger.debug(u'{id}: Finished loading all episodes for {show} from the DB', show=cur_show_name, id=cur_show_id) return scanned_eps @@ -817,26 +1044,20 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): self.indexer_api = tvapi indexed_show = self.indexer_api[self.indexerid] except IndexerException as e: - logger.log( + logger.warning( u'{id}: {indexer} error, unable to update episodes.' - u' Message: {ex}'.format( - id=self.indexerid, - indexer=indexerApi(self.indexer).name, - ex=e, - ), - logger.WARNING + u' Message: {ex}', + id=self.indexerid, + indexer=indexerApi(self.indexer).name, + ex=e ) raise - logger.log( - u'{id}: Loading all episodes from {indexer}{season_update}'.format( - id=self.indexerid, - indexer=indexerApi(self.indexer).name, - season_update=u' on seasons {seasons}'.format( - seasons=seasons - ) if seasons else u'' - ), - logger.DEBUG + logger.debug( + u'{id}: Loading all episodes from {indexer}{season_update}', + id=self.indexerid, + indexer=indexerApi(self.indexer).name, + season_update=u' on seasons {seasons}'.format(seasons=seasons) if seasons else u'' ) scanned_eps = {} @@ -857,16 +1078,16 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): if not ep: raise EpisodeNotFoundException except EpisodeNotFoundException: - logger.log(u'{id}: {indexer} object for {ep} is incomplete, skipping this episode'.format - (id=self.indexerid, indexer=indexerApi(self.indexer).name, - ep=episode_num(season, episode))) + logger.info(u'{id}: {indexer} object for {ep} is incomplete, skipping this episode', + id=self.indexerid, indexer=indexerApi(self.indexer).name, + ep=episode_num(season, episode)) continue else: try: ep.load_from_indexer(tvapi=self.indexer_api) except EpisodeDeletedException: - logger.log(u'{id}: The episode {ep} was deleted, skipping the rest of the load'.format - (id=self.indexerid, ep=episode_num(season, episode)), logger.DEBUG) + logger.debug(u'{id}: The episode {ep} was deleted, skipping the rest of the load', + id=self.indexerid, ep=episode_num(season, episode)) continue with ep.lock: @@ -880,7 +1101,7 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): # Done updating save last update date self.last_update_indexer = datetime.date.today().toordinal() - logger.log(u'{id}: Saving indexer changes to database'.format(id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Saving indexer changes to database', id=self.indexerid) self.save_to_db() return scanned_eps @@ -905,10 +1126,13 @@ def _load_externals_from_db(self, indexer=None, indexer_id=None): results = main_db_con.select(sql, [indexer, indexer_id, indexer, indexer_id]) for result in results: - if result[0] == 
self.indexer: - self.externals[mappings[result[2]]] = result[3] - else: - self.externals[mappings[result[0]]] = result[1] + try: + if result[b'indexer'] == self.indexer: + self.externals[mappings[result[b'mindexer']]] = result[b'mindexer_id'] + else: + self.externals[mappings[result[b'indexer']]] = result[b'indexer_id'] + except KeyError as e: + logger.error(u'Indexer not supported in current mappings: {id}', id=e.message) return self.externals @@ -919,8 +1143,8 @@ def _save_externals_to_db(self): for external in self.externals: if external in reverse_mappings and self.externals[external]: sql_l.append([b'INSERT OR IGNORE ' - 'INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) ' - 'VALUES (?,?,?,?)', + b'INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) ' + b'VALUES (?,?,?,?)', [self.indexerid, self.indexer, self.externals[external], @@ -1003,25 +1227,26 @@ def make_ep_from_file(self, filepath): :rtype: Episode """ if not os.path.isfile(filepath): - logger.log(u"{0}: That isn't even a real file dude... {1}".format - (self.indexerid, filepath)) + logger.info(u"{indexer_id}: That isn't even a real file dude... {filepath}", + indexer_id=self.indexerid, filepath=filepath) return None - logger.log(u'{0}: Creating episode object from {1}'.format - (self.indexerid, filepath), logger.DEBUG) + logger.debug(u'{indexer_id}: Creating episode object from {filepath}', + indexer_id=self.indexerid, filepath=filepath) try: parse_result = NameParser(show=self, try_indexers=True, parse_method=( 'normal', 'anime')[self.is_anime]).parse(filepath) except (InvalidNameException, InvalidShowException) as error: - logger.log(u'{0}: {1}'.format(self.indexerid, error), logger.DEBUG) + logger.debug(u'{indexerid}: {error}', indexer_id=self.indexerid, error=error) return None episodes = [ep for ep in parse_result.episode_numbers if ep is not None] if not episodes: - logger.log(u'{0}: parse_result: {1}'.format(self.indexerid, parse_result)) - logger.log(u'{0}: No episode number found in {1}, ignoring it'.format - (self.indexerid, filepath), logger.WARNING) + logger.debug(u'{indexerid}: parse_result: {parse_result}', + indexerid=self.indexerid, parse_result=parse_result) + logger.debug(u'{indexerid}: No episode number found in {filepath}, ignoring it', + indexerid=self.indexerid, filepath=filepath) return None # for now lets assume that any episode in the show dir belongs to that show @@ -1030,8 +1255,9 @@ def make_ep_from_file(self, filepath): sql_l = [] for current_ep in episodes: - logger.log(u'{0}: {1} parsed to {2} {3}'.format - (self.indexerid, filepath, self.name, episode_num(season, current_ep)), logger.DEBUG) + logger.debug(u'{id}: {filepath} parsed to {series_name} {ep_num}', + id=self.indexerid, filepath=filepath, series_name=self.name, + ep_num=episode_num(season, current_ep)) check_quality_again = False same_file = False @@ -1043,17 +1269,18 @@ def make_ep_from_file(self, filepath): if not cur_ep: raise EpisodeNotFoundException except EpisodeNotFoundException: - logger.log(u'{0}: Unable to figure out what this file is, skipping {1}'.format - (self.indexerid, filepath), logger.ERROR) + logger.warning(u'{indexerid}: Unable to figure out what this file is, skipping {filepath}', + indexerid=self.indexerid, filepath=filepath) continue else: # if there is a new file associated with this ep then re-check the quality if not cur_ep.location or os.path.normpath(cur_ep.location) != os.path.normpath(filepath): - logger.log( - u'{0}: The old episode had a different file associated with it, ' 
- u're-checking the quality using the new filename {1}'.format(self.indexerid, filepath), - logger.DEBUG) + logger.debug( + u'{indexerid}: The old episode had a different file associated with it, ' + u're-checking the quality using the new filename {filepath}', + indexerid=self.indexerid, filepath=filepath + ) check_quality_again = True with cur_ep.lock: @@ -1085,13 +1312,15 @@ def make_ep_from_file(self, filepath): old_ep_status = cur_ep.status new_quality = Quality.name_quality(filepath, self.is_anime) cur_ep.status = Quality.composite_status(DOWNLOADED, new_quality) - logger.log(u"{0}: Setting the status from '{1}' to '{2}' based on file: {3}. Reason: {4}".format - (self.indexerid, statusStrings[old_ep_status], statusStrings[cur_ep.status], - filepath, should_refresh_reason), logger.DEBUG) + logger.debug(u"{id}: Setting the status from '{status_old}' to '{status_cur}' " + u"based on file: {filepath}. Reason: {reason}", + id=self.indexerid, status_old=statusStrings[old_ep_status], + status_cur=statusStrings[cur_ep.status], + filepath=filepath, reason=should_refresh_reason) else: - logger.log(u"{0}: Not changing current status '{1}' based on file: {2}. " - u'Reason: {3}'.format(self.indexerid, statusStrings[cur_ep.status], - filepath, should_refresh_reason), logger.DEBUG) + logger.debug(u"{id}: Not changing current status '{status_string}' based on file: {filepath}. " + u'Reason: {should_refresh}', id=self.indexerid, status_string=statusStrings[cur_ep.status], + filepath=filepath, should_refresh=should_refresh_reason) with cur_ep.lock: sql_l.append(cur_ep.get_sql()) @@ -1108,7 +1337,7 @@ def make_ep_from_file(self, filepath): def _load_from_db(self): - logger.log(u'{id}: Loading show info from database'.format(id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Loading show info from database', id=self.indexerid) main_db_con = db.DBConnection() sql_results = main_db_con.select(b'SELECT * FROM tv_shows WHERE indexer_id = ?', [self.indexerid]) @@ -1116,7 +1345,7 @@ def _load_from_db(self): if len(sql_results) > 1: raise MultipleShowsInDatabaseException() elif not sql_results: - logger.log(u'{0}: Unable to find the show in the database'.format(self.indexerid)) + logger.info(u'{indexerid}: Unable to find the show in the database', indexerid=self.indexerid) return else: self.indexer = int(sql_results[0][b'indexer'] or 0) @@ -1136,7 +1365,7 @@ def _load_from_db(self): if self.status is None: self.status = 'Unknown' - self.airs = sql_results[0]['airs'] + self.airs = sql_results[0][b'airs'] if self.airs is None or not network_timezones.test_timeformat(self.airs): self.airs = '' @@ -1183,9 +1412,8 @@ def _load_from_db(self): ) if not sql_results: - logger.log(u'{id}: Unable to find IMDb info' - u' in the database: {show}'.format - (id=self.indexerid, show=self.name)) + logger.info(u'{id}: Unable to find IMDb info' + u' in the database: {show}', id=self.indexerid, show=self.name) return else: self.imdb_info = dict(zip(sql_results[0].keys(), sql_results[0])) @@ -1201,8 +1429,8 @@ def load_from_indexer(self, tvapi=None): if self.indexer == INDEXER_TVRAGE: return - logger.log(u'{0}: Loading show info from {1}'.format( - self.indexerid, indexerApi(self.indexer).name), logger.DEBUG) + logger.debug(u'{id}: Loading show info from {indexer_name}', + id=self.indexerid, indexer_name=indexerApi(self.indexer).name) self.indexer_api = tvapi indexed_show = self.indexer_api[self.indexerid] @@ -1216,7 +1444,7 @@ def load_from_indexer(self, tvapi=None): self.classification = getattr(indexed_show, 
'classification', 'Scripted') self.genre = getattr(indexed_show, 'genre', '') self.network = getattr(indexed_show, 'network', '') - self.runtime = getattr(indexed_show, 'runtime', '') + self.runtime = int(getattr(indexed_show, 'runtime', 0) or 0) # set the externals, using the result from the indexer. self.externals = {k: v for k, v in getattr(indexed_show, 'externals', {}).items() if v} @@ -1238,7 +1466,7 @@ def load_from_indexer(self, tvapi=None): self.status = getattr(indexed_show, 'status', 'Unknown') - self.plot = getattr(indexed_show, 'overview', '') or self.get_plot() + self.plot = getattr(indexed_show, 'overview', '') or self.imdb_plot self._save_externals_to_db() @@ -1250,22 +1478,22 @@ def load_imdb_info(self): self.imdb_id = helpers.title_to_imdb(self.name, self.start_year, imdb_api) if not self.imdb_id: - logger.log(u"{0}: Not loading show info from IMDb, " - u"because we don't know its ID.".format(self.indexerid)) + logger.info(u"{indexerid}: Not loading show info from IMDb, " + u"because we don't know its ID.", indexerid=self.indexerid) return # Make sure we only use the first ID self.imdb_id = self.imdb_id.split(',')[0] - logger.log(u'{0}: Loading show info from IMDb with ID: {1}'.format( - self.indexerid, self.imdb_id), logger.DEBUG) + logger.debug(u'{id}: Loading show info from IMDb with ID: {imdb_id}', + id=self.indexerid, imdb_id=self.imdb_id) imdb_obj = imdb_api.get_title_by_id(self.imdb_id) # If the show has no year, IMDb returned something we don't want if not imdb_obj.year: - logger.log(u'{0}: IMDb returned invalid info for {1}, skipping update.'.format( - self.indexerid, self.imdbid), logger.DEBUG) + logger.debug(u'{id}: IMDb returned invalid info for {imdb_id}, skipping update.', + id=self.indexerid, imdb_id=self.imdb_id) return self.imdb_info = { @@ -1276,7 +1504,7 @@ def load_imdb_info(self): 'genres': '|'.join(imdb_obj.genres or ''), 'countries': '', 'country_codes': '', - 'rating': imdb_obj.rating or '', + 'rating': str(imdb_obj.rating) or '', 'votes': imdb_obj.votes or '', 'runtimes': int(imdb_obj.runtime / 60) if imdb_obj.runtime else '', # Time is returned in seconds 'certificates': imdb_obj.certification or '', @@ -1286,8 +1514,8 @@ def load_imdb_info(self): self.externals['imdb_id'] = self.imdb_id - logger.log(u'{0}: Obtained info from IMDb: {1}'.format( - self.indexerid, self.imdb_info), logger.DEBUG) + logger.debug(u'{id}: Obtained info from IMDb: {imdb_info}', + id=self.indexerid, imdb_info=self.imdb_info) def next_episode(self): """Return the next episode air date. @@ -1295,7 +1523,7 @@ def next_episode(self): :return: :rtype: datetime.date """ - logger.log(u'{0}: Finding the episode which airs next'.format(self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Finding the episode which airs next', id=self.indexerid) cur_date = datetime.date.today().toordinal() if not self.next_aired or self.next_aired and cur_date > self.next_aired: @@ -1317,13 +1545,11 @@ def next_episode(self): [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED]) if sql_results is None or len(sql_results) == 0: - logger.log(u'{id}: No episode found... need to implement a show status'.format - (id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: No episode found... 
need to implement a show status', id=self.indexerid) self.next_aired = u'' else: - logger.log(u'{id}: Found episode {ep}'.format - (id=self.indexerid, ep=episode_num(sql_results[0][b'season'], sql_results[0][b'episode'])), - logger.DEBUG) + logger.debug(u'{id}: Found episode {ep}', + id=self.indexerid, ep=episode_num(sql_results[0][b'season'], sql_results[0][b'episode'])) self.next_aired = sql_results[0][b'airdate'] return self.next_aired @@ -1351,8 +1577,8 @@ def delete_show(self, full=False): # clear the cache image_cache_dir = os.path.join(app.CACHE_DIR, 'images') for cache_file in glob.glob(os.path.join(image_cache_dir, str(self.indexerid) + '.*')): - logger.log(u'{id}: Attempt to {action} cache file {cache_file}'.format - (id=self.indexerid, action=action, cache_file=cache_file)) + logger.info(u'{id}: Attempt to {action} cache file {cache_file}', + id=self.indexerid, action=action, cache_file=cache_file) try: if app.TRASH_REMOVE_SHOW: send2trash(cache_file) @@ -1360,53 +1586,52 @@ def delete_show(self, full=False): os.remove(cache_file) except OSError as e: - logger.log(u'{id}: Unable to {action} {cache_file}: {error_msg}'.format - (id=self.indexerid, action=action, cache_file=cache_file, error_msg=ex(e)), logger.WARNING) + logger.warning(u'{id}: Unable to {action} {cache_file}: {error_msg}', + id=self.indexerid, action=action, cache_file=cache_file, error_msg=ex(e)) # remove entire show folder if full: try: - logger.log(u'{id}: Attempt to {action} show folder {location}'.format - (id=self.indexerid, action=action, location=self.location)) + logger.info(u'{id}: Attempt to {action} show folder {location}', + id=self.indexerid, action=action, location=self.location) # check first the read-only attribute file_attribute = os.stat(self.location)[0] if not file_attribute & stat.S_IWRITE: # File is read-only, so make it writeable - logger.log(u'{id}: Attempting to make writeable the read only folder {location}'.format - (id=self.indexerid, location=self.location), logger.DEBUG) + logger.debug(u'{id}: Attempting to make writeable the read only folder {location}', + id=self.indexerid, location=self.location) try: os.chmod(self.location, stat.S_IWRITE) except OSError: - logger.log(u'{id}: Unable to change permissions of {location}'.format - (id=self.indexerid, location=self.location), logger.WARNING) + logger.warning(u'{id}: Unable to change permissions of {location}', + id=self.indexerid, location=self.location) if app.TRASH_REMOVE_SHOW: send2trash(self.location) else: shutil.rmtree(self.location) - logger.log(u'{id}: {action} show folder {location}'.format - (id=self.indexerid, action=action, location=self.raw_location)) + logger.info(u'{id}: {action} show folder {location}', + id=self.indexerid, action=action, location=self.raw_location) except ShowDirectoryNotFoundException: - logger.log(u'{id}: Show folder {location} does not exist. No need to {action}'.format - (id=self.indexerid, location=self.raw_location, action=action), logger.WARNING) + logger.warning(u'{id}: Show folder {location} does not exist. No need to {action}', + id=self.indexerid, location=self.raw_location, action=action) except OSError as e: - logger.log(u'{id}: Unable to {action} {location}. Error: {error_msg}'.format - (id=self.indexerid, action=action, location=self.raw_location, error_msg=ex(e)), - logger.WARNING) + logger.warning(u'{id}: Unable to {action} {location}. 
Error: {error_msg}', + id=self.indexerid, action=action, location=self.raw_location, error_msg=ex(e)) if app.USE_TRAKT and app.TRAKT_SYNC_WATCHLIST: - logger.log(u'{id}: Removing show {show} from Trakt watchlist'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Removing show {show} from Trakt watchlist', + id=self.indexerid, show=self.name) notifiers.trakt_notifier.update_watchlist(self, update='remove') def populate_cache(self): """Populate image caching.""" cache_inst = image_cache.ImageCache() - logger.log(u'{id}: Checking & filling cache for show {show}'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Checking & filling cache for show {show}', + id=self.indexerid, show=self.name) cache_inst.fill_cache(self) def refresh_dir(self): @@ -1420,14 +1645,14 @@ def refresh_dir(self): return False # Let's get some fresh indexer info, as we might need it later on. - self.create_indexer() + # self.create_indexer() # load from dir self.load_episodes_from_dir() # run through all locations from DB, check that they exist - logger.log(u'{id}: Loading all episodes from {show} with a location from the database'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Loading all episodes from {show} with a location from the database', + id=self.indexerid, show=self.name) main_db_con = db.DBConnection() sql_results = main_db_con.select( @@ -1450,9 +1675,9 @@ def refresh_dir(self): if not cur_ep: raise EpisodeDeletedException except EpisodeDeletedException: - logger.log(u'{id:} Episode {show} {ep} was deleted while we were refreshing it, ' - u'moving on to the next one'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode)), logger.DEBUG) + logger.debug(u'{id:} Episode {show} {ep} was deleted while we were refreshing it, ' + u'moving on to the next one', + id=self.indexerid, show=self.name, ep=episode_num(season, episode)) continue # if the path doesn't exist or if it's not in our show dir @@ -1472,10 +1697,12 @@ def refresh_dir(self): else: new_status = app.EP_DEFAULT_DELETED_STATUS - logger.log(u"{id}: Location for {show} {ep} doesn't exist, " - u"removing it and changing our status to '{status}'".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - status=statusStrings[new_status].upper()), logger.DEBUG) + logger.debug(u"{id}: Location for {show} {ep} doesn't exist and status is {old_status}, " + u"removing it and changing our status to '{status}'", + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + old_status=statusStrings[cur_ep.status].upper(), + status=statusStrings[new_status].upper()) + cur_ep.status = new_status cur_ep.subtitles = '' cur_ep.subtitles_searchcount = 0 @@ -1487,23 +1714,23 @@ def refresh_dir(self): sql_l.append(cur_ep.get_sql()) - logger.log('{id}: Looking for hanging associated files for: {show} {ep} in: {location}'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), location=cur_loc)) + logger.info(u'{id}: Looking for hanging associated files for: {show} {ep} in: {location}', + id=self.indexerid, show=self.name, ep=episode_num(season, episode), location=cur_loc) related_files = post_processor.PostProcessor(cur_loc).list_associated_files( cur_loc, base_name_only=False, subfolders=True) if related_files: - logger.log(u'{id}: Found hanging associated files for {show} {ep}, deleting: {files}'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - files=related_files), - 
logger.WARNING) + logger.warning(u'{id}: Found hanging associated files for {show} {ep}, deleting: {files}', + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + files=related_files) for related_file in related_files: try: os.remove(related_file) - except Exception as e: - logger.log( - u'{id}: Could not delete associated file: {related_file}. Error: {error_msg}'.format - (id=self.indexerid, related_file=related_file, error_msg=e), logger.WARNING) + except OSError as e: + logger.warning( + u'{id}: Could not delete associated file: {related_file}. Error: {error_msg}', + id=self.indexerid, related_file=related_file, error_msg=e + ) # Clean up any empty season folders after deletion of associated files helpers.delete_empty_folders(self.location) @@ -1515,33 +1742,27 @@ def refresh_dir(self): def download_subtitles(self): """Download subtitles.""" if not self.is_location_valid(): - logger.log(u"{id}: Show {show} location doesn't exist, can't download subtitles".format - (id=self.indexerid, show=self.name), logger.WARNING) + logger.warning(u"{id}: Show {show} location doesn't exist, can't download subtitles", + id=self.indexerid, show=self.name) return - logger.log(u'{id}: Downloading subtitles for {show}'.format(id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Downloading subtitles for {show}', id=self.indexerid, show=self.name) - try: - episodes = self.get_all_episodes(has_location=True) - if not episodes: - logger.log(u'{id}: No episodes to download subtitles for {show}'.format - (id=self.indexerid, show=self.name), logger.DEBUG) - return - - for episode in episodes: - episode.download_subtitles() + episodes = self.get_all_episodes(has_location=True) + if not episodes: + logger.debug(u'{id}: No episodes to download subtitles for {show}', + id=self.indexerid, show=self.name) + return - except Exception: - logger.log(u'{id}: Error occurred when downloading subtitles for show {show}'.format - (id=self.indexerid, show=self.name), logger.WARNING) - logger.log(traceback.format_exc(), logger.ERROR) + for episode in episodes: + episode.download_subtitles() def save_to_db(self): """Save to database.""" if not self.dirty: return - logger.log(u'{id}: Saving to database: {show}'.format(id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Saving to database: {show}', id=self.indexerid, show=self.name) control_value_dict = {'indexer_id': self.indexerid} new_value_dict = {'indexer': self.indexer, @@ -1641,109 +1862,68 @@ def __unicode__(self): def to_json(self, detailed=True): """Return JSON representation.""" - indexer_name = self.indexer_slug bw_list = self.release_groups or BlackAndWhiteList(self.indexerid) - result = OrderedDict([ - ('id', OrderedDict([ - (indexer_name, self.indexerid), - ('imdb', str(self.imdb_id)) - ])), - ('title', self.name), - ('indexer', indexer_name), # e.g. tvdb - ('network', self.network), # e.g. CBS - ('type', self.classification), # e.g. Scripted - ('status', self.status), # e.g. 
Continuing - ('airs', text_type(self.airs).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ').strip()), - # e.g Thursday 8:00 PM - ('language', self.lang), - ('showType', 'sports' if self.is_sports else ('anime' if self.is_anime else 'series')), - ('akas', self.get_akas()), - ('year', OrderedDict([ - ('start', self.imdb_info.get('year') or self.start_year), - ])), - ('nextAirDate', self.get_next_airdate()), - ('runtime', self.imdb_info.get('runtimes') or self.runtime), - ('genres', self.get_genres()), - ('rating', OrderedDict([])), - ('classification', self.imdb_info.get('certificates')), - ('cache', OrderedDict([])), - ('countries', self.get_countries()), - ('plot', self.get_plot()), - ('config', OrderedDict([ - ('location', self.raw_location), - ('qualities', OrderedDict([ - ('allowed', self.get_allowed_qualities()), - ('preferred', self.get_preferred_qualities()), - ])), - ('paused', bool(self.paused)), - ('airByDate', bool(self.air_by_date)), - ('subtitlesEnabled', bool(self.subtitles)), - ('dvdOrder', bool(self.dvd_order)), - ('flattenFolders', bool(self.flatten_folders)), - ('scene', self.is_scene), - ('defaultEpisodeStatus', statusStrings[self.default_ep_status]), - ('aliases', self.exceptions or get_scene_exceptions(self.indexerid, self.indexer)), - ('release', OrderedDict([ - ('blacklist', bw_list.blacklist), - ('whitelist', bw_list.whitelist), - ('ignoredWords', [v for v in (self.rls_ignore_words or '').split(',') if v]), - ('requiredWords', [v for v in (self.rls_require_words or '').split(',') if v]), - ])), - ])) - ]) - - cache = image_cache.ImageCache() - if 'rating' in self.imdb_info and 'votes' in self.imdb_info: - result['rating']['imdb'] = OrderedDict([ - ('stars', self.imdb_info.get('rating')), - ('votes', self.imdb_info.get('votes')), - ]) - if os.path.isfile(cache.poster_path(self.indexerid)): - result['cache']['poster'] = cache.poster_path(self.indexerid) - if os.path.isfile(cache.banner_path(self.indexerid)): - result['cache']['banner'] = cache.banner_path(self.indexerid) + + data = NonEmptyDict() + data['id'] = NonEmptyDict() + data['id'][self.indexer_name] = self.indexerid + data['id']['imdb'] = text_type(self.imdb_id) + data['title'] = self.name + data['indexer'] = self.indexer_name # e.g. tvdb + data['network'] = self.network # e.g. CBS + data['type'] = self.classification # e.g. Scripted + data['status'] = self.status # e.g. Continuing + data['airs'] = self.airs # e.g. Thursday 8:00 PM + data['language'] = self.lang + data['showType'] = self.show_type # e.g. 
anime, sport, series + data['akas'] = self.imdb_akas + data['year'] = NonEmptyDict() + data['year']['start'] = self.imdb_year or self.start_year + data['nextAirDate'] = self.next_airdate + data['runtime'] = self.imdb_runtime or self.runtime + data['genres'] = self.genres + data['rating'] = NonEmptyDict() + if self.imdb_rating and self.imdb_votes: + data['rating']['imdb'] = NonEmptyDict() + data['rating']['imdb']['rating'] = self.imdb_rating + data['rating']['imdb']['votes'] = self.imdb_votes + + data['classification'] = self.imdb_certificates + data['cache'] = NonEmptyDict() + data['cache']['poster'] = self.poster + data['cache']['banner'] = self.banner + data['countries'] = self.imdb_countries + data['plot'] = self.imdb_plot or self.plot + data['config'] = NonEmptyDict() + data['config']['location'] = self.raw_location + data['config']['qualities'] = NonEmptyDict() + data['config']['qualities']['allowed'] = self.get_allowed_qualities() + data['config']['qualities']['preferred'] = self.get_preferred_qualities() + data['config']['paused'] = bool(self.paused) + data['config']['airByDate'] = bool(self.air_by_date) + data['config']['subtitlesEnabled'] = bool(self.subtitles) + data['config']['dvdOrder'] = bool(self.dvd_order) + data['config']['flattenFolders'] = bool(self.flatten_folders) + data['config']['scene'] = self.is_scene + data['config']['paused'] = bool(self.paused) + data['config']['defaultEpisodeStatus'] = self.default_ep_status_name + data['config']['aliases'] = self.aliases + data['config']['release'] = NonEmptyDict() + data['config']['release']['blacklist'] = bw_list.blacklist + data['config']['release']['whitelist'] = bw_list.whitelist + data['config']['release']['ignoredWords'] = self.release_ignore_words + data['config']['release']['requiredWords'] = self.release_required_words if detailed: - result.update(OrderedDict([ - ('seasons', OrderedDict([])) - ])) episodes = self.get_all_episodes() - result['seasons'] = [list(v) for _, v in groupby([ep.to_json() for ep in episodes], lambda item: item['season'])] - result['episodeCount'] = len(episodes) + data['seasons'] = [list(v) for _, v in + groupby([ep.to_json() for ep in episodes], lambda item: item['season'])] + data['episodeCount'] = len(episodes) last_episode = episodes[-1] if episodes else None if self.status == 'Ended' and last_episode and last_episode.airdate: - result['year']['end'] = last_episode.airdate.year - - return result - - def get_next_airdate(self): - """Return next airdate.""" - return ( - sbdatetime.convert_to_setting(network_timezones.parse_date_time(self.next_aired, self.airs, self.network)) - if try_int(self.next_aired, 1) > MILLIS_YEAR_1900 else None - ) - - def get_genres(self): - """Return genres list.""" - return list({v for v in (self.genre or '').split('|') if v} | - {v for v in self.imdb_info.get('genres', '').replace('Sci-Fi', 'Science-Fiction').split('|') if v}) - - def get_akas(self): - """Return genres akas dict.""" - akas = {} - for x in [v for v in self.imdb_info.get('akas', '').split('|') if v]: - if '::' in x: - val, key = x.split('::') - akas[key] = val - return akas + data['year']['end'] = last_episode.airdate.year - def get_countries(self): - """Return country codes.""" - return [v for v in self.imdb_info.get('country_codes', '').split('|') if v] - - def get_plot(self): - """Return show plot.""" - return self.imdb_info.get('plot', '') + return data def get_allowed_qualities(self): """Return allowed qualities.""" @@ -1757,6 +1937,45 @@ def get_preferred_qualities(self): return 
[Quality.qualityStrings[v] for v in preferred] + def get_all_possible_names(self, season=-1): + """Get every possible variation of the name for a particular show. + + Includes indexer name, and any scene exception names, and country code + at the end of the name (e.g. "Show Name (AU)". + + show: a Series object that we should get the names of + Returns: all possible show names + """ + show_names = get_scene_exceptions(self.indexerid, self.indexer, season) + show_names.add(self.name) + + new_show_names = set() + + if not self.is_anime: + country_list = {} + # add the country list + country_list.update(countryList) + # add the reversed mapping of the country list + country_list.update({v: k for k, v in countryList.items()}) + + for name in show_names: + if not name: + continue + + # if we have "Show Name Australia" or "Show Name (Australia)" + # this will add "Show Name (AU)" for any countries defined in + # common.countryList (and vice versa) + for country in country_list: + pattern_1 = ' {0}'.format(country) + pattern_2 = ' ({0})'.format(country) + replacement = ' ({0})'.format(country_list[country]) + if name.endswith(pattern_1): + new_show_names.add(name.replace(pattern_1, replacement)) + elif name.endswith(pattern_2): + new_show_names.add(name.replace(pattern_2, replacement)) + + return show_names.union(new_show_names) + @staticmethod def __qualities_to_string(qualities=None): return ', '.join([Quality.qualityStrings[quality] for quality in qualities or [] @@ -1780,15 +1999,15 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c """ # if the quality isn't one we want under any circumstances then just say no allowed_qualities, preferred_qualities = self.current_qualities - logger.log(u'{id}: Allowed, Preferred = [ {allowed} ] [ {preferred} ] Found = [ {found} ]'.format - (id=self.indexerid, allowed=self.__qualities_to_string(allowed_qualities), - preferred=self.__qualities_to_string(preferred_qualities), - found=self.__qualities_to_string([quality])), logger.DEBUG) + logger.debug(u'{id}: Allowed, Preferred = [ {allowed} ] [ {preferred} ] Found = [ {found} ]', + id=self.indexerid, allowed=self.__qualities_to_string(allowed_qualities), + preferred=self.__qualities_to_string(preferred_qualities), + found=self.__qualities_to_string([quality])) if not Quality.wanted_quality(quality, allowed_qualities, preferred_qualities): - logger.log(u"{id}: Ignoring found result for '{show}' {ep} with unwanted quality '{quality}'".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - quality=Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(u"{id}: Ignoring found result for '{show}' {ep} with unwanted quality '{quality}'", + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + quality=Quality.qualityStrings[quality]) return False main_db_con = db.DBConnection() @@ -1804,10 +2023,10 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c b' AND episode = ?', [self.indexerid, season, episode]) if not sql_results or not len(sql_results): - logger.log(u'{id}: Unable to find a matching episode in database. ' - u"Ignoring found result for '{show}' {ep} with quality '{quality}'".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - quality=Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(u'{id}: Unable to find a matching episode in database. 
' + u"Ignoring found result for '{show}' {ep} with quality '{quality}'", + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + quality=Quality.qualityStrings[quality]) return False ep_status = int(sql_results[0][b'status']) @@ -1817,19 +2036,18 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c # if it's one of these then we want it as long as it's in our allowed initial qualities if ep_status == WANTED: - logger.log(u"{id}: '{show}' {ep} status is 'WANTED'. Accepting result with quality '{new_quality}'".format - (id=self.indexerid, status=ep_status_text, show=self.name, ep=episode_num(season, episode), - new_quality=Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(u"{id}: '{show}' {ep} status is 'WANTED'. Accepting result with quality '{new_quality}'", + id=self.indexerid, status=ep_status_text, show=self.name, ep=episode_num(season, episode), + new_quality=Quality.qualityStrings[quality]) return True - should_replace, msg = Quality.should_replace(ep_status, cur_quality, quality, allowed_qualities, - preferred_qualities, download_current_quality, - forced_search, manually_searched) - logger.log(u"{id}: '{show}' {ep} status is: '{status}'. {action} result with quality '{new_quality}'. " - u"Reason: {msg}".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - status=ep_status_text, action='Accepting' if should_replace else 'Ignoring', - new_quality=Quality.qualityStrings[quality], msg=msg), logger.DEBUG) + should_replace, reason = Quality.should_replace(ep_status, cur_quality, quality, allowed_qualities, + preferred_qualities, download_current_quality, + forced_search, manually_searched) + logger.debug(u"{id}: '{show}' {ep} status is: '{status}'. {action} result with quality '{new_quality}'. 
" + u"Reason: {reason}", id=self.indexerid, show=self.name, ep=episode_num(season, episode), + status=ep_status_text, action='Accepting' if should_replace else 'Ignoring', + new_quality=Quality.qualityStrings[quality], reason=reason) return should_replace def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): @@ -1875,8 +2093,7 @@ def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): else: return Overview.GOOD else: - logger.log(u'Could not parse episode status into a valid overview status: {status}'.format - (status=ep_status), logger.ERROR) + logger.error(u'Could not parse episode status into a valid overview status: {status}', status=ep_status) def get_backlogged_episodes(self, allowed_qualities, preferred_qualities, include_wanted=False): """Check how many episodes will be backlogged when changing qualities.""" @@ -1922,9 +2139,41 @@ def set_all_episodes_archived(self, final_status_only=False): if sql_list: main_db_con = db.DBConnection() main_db_con.mass_action(sql_list) - logger.log(u'Change all DOWNLOADED episodes to ARCHIVED ' - u'for show ID: {show}'.format(show=self.name), logger.DEBUG) + logger.debug(u'Change all DOWNLOADED episodes to ARCHIVED ' + u'for show ID: {show}', show=self.name) return True else: - logger.log(u'No DOWNLOADED episodes for show ID: {show}'.format(show=self.name), logger.DEBUG) + logger.debug(u'No DOWNLOADED episodes for show ID: {show}', show=self.name) return False + + def pause(self): + """Pause the series.""" + self.paused = True + self.save_to_db() + + def unpause(self): + """Unpause the series.""" + self.paused = False + self.save_to_db() + + def delete(self, remove_files): + """Delete the series.""" + try: + app.show_queue_scheduler.action.removeShow(self, bool(remove_files)) + return True + except CantRemoveShowException: + pass + + def get_asset(self, asset_type): + """Get the specified asset for this series.""" + asset_type = asset_type.lower() + media_format = ('normal', 'thumb')[asset_type in ('bannerthumb', 'posterthumb', 'small')] + + if asset_type.startswith('banner'): + return ShowBanner(self.indexerid, media_format) + elif asset_type.startswith('fanart'): + return ShowFanArt(self.indexerid, media_format) + elif asset_type.startswith('poster'): + return ShowPoster(self.indexerid, media_format) + elif asset_type.startswith('network'): + return ShowNetworkLogo(self.indexerid, media_format) diff --git a/medusa/version_checker.py b/medusa/version_checker.py index 0fddb908be..046c3ba740 100644 --- a/medusa/version_checker.py +++ b/medusa/version_checker.py @@ -31,6 +31,9 @@ from .github_client import get_github_repo from .helper.exceptions import ex +ERROR_MESSAGE = ('Unable to find your git executable. 
Set git executable path in Advanced Settings ' + 'OR shutdown application and delete your .git folder and run from source to enable updates.') + class CheckVersion(object): """Version check class meant to run as a thread object with the sr scheduler.""" @@ -272,6 +275,7 @@ def check_for_new_version(self, force=False): if not self.updater or (not app.VERSION_NOTIFY and not app.AUTO_UPDATE and not force): logger.log(u'Version checking is disabled, not checking for the newest version') + app.NEWEST_VERSION_STRING = None return False # checking for updates @@ -407,12 +411,6 @@ def get_num_commits_behind(self): def get_num_commits_ahead(self): return self._num_commits_ahead - @staticmethod - def _git_error(): - error_message = ('Unable to find your git executable - Shutdown the application and EITHER set git_path ' - 'in your config.ini OR delete your .git folder and run from source to enable updates.') - app.NEWEST_VERSION_STRING = error_message - def _find_working_git(self): test_cmd = 'version' @@ -456,9 +454,9 @@ def _find_working_git(self): logger.log(u'Not using: {0}'.format(cur_git), logger.DEBUG) # Still haven't found a working git - error_message = ('Unable to find your git executable - Shutdown the application and EITHER set git_path ' - 'in your config.ini OR delete your .git folder and run from source to enable updates.') - app.NEWEST_VERSION_STRING = error_message + # Warn user only if he has version check enabled + if app.VERSION_NOTIFY: + app.NEWEST_VERSION_STRING = ERROR_MESSAGE return None @@ -469,9 +467,14 @@ def _run_git(git_path, args): if not git_path: logger.log(u"No git specified, can't use git commands", logger.WARNING) + app.NEWEST_VERSION_STRING = ERROR_MESSAGE exit_status = 1 return output, err, exit_status + # If we have a valid git remove the git warning + # String will be updated as soon we check github + app.NEWEST_VERSION_STRING = None + cmd = git_path + ' ' + args try: @@ -736,11 +739,20 @@ def list_remote_branches(self): def update_remote_origin(self): self._run_git(self._git_path, 'config remote.%s.url %s' % (app.GIT_REMOTE, app.GIT_REMOTE_URL)) - if app.GIT_USERNAME: - if app.DEVELOPER: - self._run_git(self._git_path, 'config remote.%s.pushurl %s' % (app.GIT_REMOTE, app.GIT_REMOTE_URL)) - else: - self._run_git(self._git_path, 'config remote.%s.pushurl %s' % (app.GIT_REMOTE, app.GIT_REMOTE_URL.replace(app.GIT_ORG, app.GIT_USERNAME, 1))) + if app.GIT_AUTH_TYPE == 0: + if app.GIT_USERNAME: + if app.DEVELOPER: + self._run_git(self._git_path, 'config remote.%s.pushurl %s' % (app.GIT_REMOTE, app.GIT_REMOTE_URL)) + else: + self._run_git(self._git_path, 'config remote.%s.pushurl %s' + % (app.GIT_REMOTE, app.GIT_REMOTE_URL.replace(app.GIT_ORG, app.GIT_USERNAME, 1))) + else: + if app.GIT_TOKEN: + if app.DEVELOPER: + self._run_git(self._git_path, 'config remote.%s.pushurl %s' % (app.GIT_REMOTE, app.GIT_REMOTE_URL)) + else: + self._run_git(self._git_path, 'config remote.%s.pushurl %s' + % (app.GIT_REMOTE, app.GIT_REMOTE_URL.replace(app.GIT_ORG, app.GIT_USERNAME, 1))) class SourceUpdateManager(UpdateManager): diff --git a/package.json b/package.json index 250138fff5..45eca0f816 100644 --- a/package.json +++ b/package.json @@ -10,11 +10,14 @@ }, "homepage": "https://github.com/PyMedusa/Medusa#readme", "scripts": { - "test": "xo", + "test-js": "xo", + "test-css": "node_modules/stylelint/bin/stylelint.js 'static/css/*.css'", + "test-api": "node_modules/.bin/dredd --config dredd/dredd.yml", "security": "snyk test", - "build": "./node_modules/grunt-cli/bin/grunt" + "build": 
"node_modules/grunt-cli/bin/grunt" }, "devDependencies": { + "dredd": "^3.3.0", "grunt": "^0.4.5", "grunt-bower-concat": "^1.0.0", "grunt-bower-task": "^0.4.0", @@ -25,36 +28,59 @@ "grunt-contrib-uglify": "^0.11.1", "load-grunt-tasks": "^3.3.0", "snyk": "^1.9.1", - "stylelint": "^7.2.0", - "xo": "^0.16.0" + "stylelint-config-standard": "^16.0.0", + "xo": "^0.17.1" }, "xo": { "space": 4, "rules": { - "space-before-function-paren": ["error", "never"] + "space-before-function-paren": [ + "error", + "never" + ] }, "envs": [ "browser", "jquery" ], "globals": [ + "PNotify", + "LazyLoad", + "_", + "log", "MEDUSA", - "PNotify" + "api", + "apiKey", + "apiRoot", + "s" ], "ignores": [ "static/js/lib/**", "static/js/*.min.js", - "static/js/vender.js" + "static/js/vender.js", + "static/js/api.js" ] }, "stylelint": { - "extends": "./.build/node_modules/stylelint-config-standard", + "extends": "stylelint-config-standard", "rules": { - "indentation": [ 4, { - "ignore": ["block"], - "message": "Please use 4 spaces for indentation. Tabs make OmgImAlexis sad.", - "severity": "error" - } ] - } + "indentation": [ + 4, + { + "ignore": [ + "inside-parens" + ], + "message": "Please use 4 spaces for indentation. Tabs make OmgImAlexis sad.", + "severity": "error" + } + ] + }, + "ignoreFiles": [ + "static/css/lib/**", + "static/css/*.min.css" + ] + }, + "dependencies": { + "stylelint": "^7.9.0" } } diff --git a/pytest.ini b/pytest.ini index a2bb76bdd0..ea8b537685 100644 --- a/pytest.ini +++ b/pytest.ini @@ -9,10 +9,14 @@ flake8-ignore = medusa/bs4_parser.py D100 D101 D102 D105 medusa/cache.py E305 medusa/classes.py D100 D101 D102 D105 E305 - medusa/clients/deluged_client.py E305 - medusa/clients/mlnet_client.py E305 - medusa/clients/qbittorrent_client.py E305 - medusa/clients/rtorrent_client.py E305 + medusa/clients/__init__.py D104 + medusa/clients/nzb/__init__.py D104 + medusa/clients/nzb/nzbget.py D100 D400 D401 N802 N806 + medusa/clients/nzb/sab.py D100 D202 D400 D401 N802 N806 + medusa/clients/torrent/deluged_client.py E305 + medusa/clients/torrent/mlnet_client.py E305 + medusa/clients/torrent/qbittorrent_client.py E305 + medusa/clients/torrent/rtorrent_client.py E305 medusa/common.py D101 D102 D105 medusa/config.py D100 D101 D103 D200 D202 D205 D210 D400 D401 N802 N803 N806 medusa/databases/__init__.py D104 @@ -27,8 +31,8 @@ flake8-ignore = medusa/helper/common.py D100 D200 D202 D205 D400 medusa/helper/encoding.py D100 D202 D400 D401 medusa/helper/exceptions.py D100 D200 D202 D205 D400 - medusa/helper/quality.py D100 D202 D205 D400 - medusa/helpers.py D103 N806 + medusa/helpers/__init__.py D103 N806 + medusa/helpers/quality.py D100 D202 D205 D400 medusa/history.py D100 D400 N802 N803 N806 medusa/image_cache.py D100 D101 D102 D105 medusa/imdb_popular.py D100 @@ -86,21 +90,14 @@ flake8-ignore = medusa/notifiers/tweet.py D100 D101 D102 N802 medusa/numdict.py D102 D105 D205 D400 D401 medusa/nzb_splitter.py D100 D202 D400 - medusa/nzbget.py D100 D400 D401 N802 N806 - medusa/process_tv.py D100 D101 D102 D103 D202 D400 E265 N802 N803 N806 + medusa/process_tv.py D100 D101 D102 medusa/recompiled/__init__.py D104 medusa/recompiled/tags.py D100 medusa/rss_feeds.py D100 D103 N802 - medusa/sab.py D100 D202 D400 D401 N802 N806 medusa/sbdatetime.py D100 D101 D102 D105 D202 D205 D400 N801 medusa/scene_exceptions.py D100 medusa/scene_numbering.py D100 D200 D205 D400 D401 E501 N803 N806 medusa/scheduler.py D100 D101 D102 D200 D205 D400 D401 N802 N803 - medusa/search/__init__.py D104 - medusa/search/backlog.py D100 D101 D102 
N802 N806 - medusa/search/daily.py D100 D102 D400 D401 - medusa/search/manual.py D100 D200 D202 D205 D210 D400 D401 - medusa/search/queue.py D100 D101 D102 D103 D200 D204 D205 D210 D400 D401 E231 N803 medusa/server/__init__.py D104 medusa/server/api/__init__.py D104 medusa/server/api/v1/__init__.py D104 @@ -151,7 +148,6 @@ flake8-ignore = medusa/system/shutdown.py D100 D101 D102 medusa/tagger/__init__.py D104 medusa/tagger/episode.py D102 D200 D204 D205 D400 - medusa/trakt_checker.py D100 D101 D102 D200 D202 D205 D208 D400 D401 E261 E501 N802 N803 N806 medusa/tv/__init__.py D104 F401 medusa/ui.py D100 D101 D102 D200 D202 D204 D205 D400 D401 E305 N802 N803 N806 medusa/version_checker.py D100 D101 D102 D200 D202 D205 D400 D401 N802 N803 N806 diff --git a/readme.md b/readme.md index 29eca90a3a..9c08a6d6cc 100644 --- a/readme.md +++ b/readme.md @@ -1,9 +1,9 @@ ![Medusa](static/images/medusa-logo.png) -[![Build Status](https://travis-ci.org/pymedusa/Medusa.svg?branch=develop)](https://travis-ci.org/pymedusa/Medusa) [![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Average time to resolve an issue") [![Percentage of issues still open](http://isitmaintained.com/badge/open/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Percentage of issues still open") [![Codacy Badge](https://api.codacy.com/project/badge/Grade/ade58b4469dd4b38bbbd681913d97bfc)](https://www.codacy.com/app/pymedusa/Medusa?utm_source=github.com&utm_medium=referral&utm_content=pymedusa/Medusa&utm_campaign=Badge_Grade) +[![Build Status](https://travis-ci.org/pymedusa/Medusa.svg?branch=master)](https://travis-ci.org/pymedusa/Medusa) [![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Average time to resolve an issue") [![Percentage of issues still open](http://isitmaintained.com/badge/open/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Percentage of issues still open") [![Codacy Badge](https://api.codacy.com/project/badge/Grade/ade58b4469dd4b38bbbd681913d97bfc)](https://www.codacy.com/app/pymedusa/Medusa?utm_source=github.com&utm_medium=referral&utm_content=pymedusa/Medusa&utm_campaign=Badge_Grade) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/sindresorhus/xo) -===== -Automatic Video Library Manager for TV Shows. It watches for new episodes of your favorite shows, and when they are posted it does its magic. + +### Automatic Video Library Manager for TV Shows. It watches for new episodes of your favorite shows, and when they are posted it does its magic. #### Exclusive features - Supports TVMaze and TMDB indexers @@ -21,7 +21,7 @@ Automatic Video Library Manager for TV Shows. It watches for new episodes of you #### Features - Kodi/XBMC library updates, poster/banner/fanart downloads, and NFO/TBN generation - - Sync your shows with Trakt. Keep shows/episode updated in Trakt watchlist + - Sync your shows with Trakt. 
Keep shows/episode updated in Trakt watchlist - Configurable automatic episode renaming, sorting, and other processing - Easily see what episodes you're missing, are airing soon, and more - Automatic torrent/nzb searching, downloading, and processing at the qualities you want @@ -94,4 +94,4 @@ The [linuxserver.io](https://www.linuxserver.io) team have kindly produced docke * armhf - [Dockerhub](https://hub.docker.com/r/lsioarmhf/medusa/), [Github](https://github.com/linuxserver/docker-medusa-armhf) * aarch64 - [Dockerhub](https://hub.docker.com/r/lsioarmhf/medusa-aarch64/), [Github](https://github.com/linuxserver/docker-medusa-arm64) - + diff --git a/runscripts/init.gentoo b/runscripts/init.gentoo index 6247d8d116..6d40b0b15b 100755 --- a/runscripts/init.gentoo +++ b/runscripts/init.gentoo @@ -1,4 +1,4 @@ -#!/sbin/runscript +#!/sbin/openrc-run # Copyright 1999-2013 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 @@ -9,12 +9,12 @@ # You will need to create a configuration file in order for this script # to work properly. Please create /etc/conf.d/medusa with the following: # -# SICKRAGE_USER= -# SICKRAGE_GROUP= -# SICKRAGE_DIR= -# PATH_TO_PYTHON_2=/usr/bin/python2.7 -# SICKRAGE_DATADIR= -# SICKRAGE_CONFDIR= +# MEDUSA_USER= +# MEDUSA_GROUP= +# MEDUSA_DIR= +# PATH_TO_PYTHON_2=/usr/bin/python2 +# MEDUSA_DATADIR= +# MEDUSA_CONFDIR= # RUNDIR=/var/run/medusa @@ -25,43 +25,38 @@ depend() { get_pidfile() { # Parse the config.ini file for the value of web_port in the General section - eval `sed -e 's/[[:space:]]*\=[[:space:]]*/=/g' \ - -e 's/;.*$//' \ - -e 's/[[:space:]]*$//' \ - -e 's/^[[:space:]]*//' \ - -e "s/^\(.*\)=\([^\"']*\)$/\1=\"\2\"/" \ - < ${SICKRAGE_CONFDIR}/config.ini \ - | sed -n -e "/^\[General\]/,/^\s*\[/{/^[^;].*\=.*/p;}"` - - echo "${RUNDIR}/medusa-${web_port}.pid" + eval web_port="$(grep -Po 'web_port = \K[^ ]+' \ + "${MEDUSA_CONFDIR}/config.ini")" + echo "${RUNDIR}/medusa-${web_port}.pid" } start() { ebegin "Starting Medusa" - checkpath -q -d -o ${SICKRAGE_USER}:${SICKRAGE_GROUP} -m 0770 "${RUNDIR}" + checkpath -q -d -o "${MEDUSA_USER}":"${MEDUSA_GROUP}" -m 0770 "${RUNDIR}" start-stop-daemon \ --quiet \ --start \ - --user ${SICKRAGE_USER} \ - --group ${SICKRAGE_GROUP} \ + --user "${MEDUSA_USER}" \ + --group "${MEDUSA_GROUP}" \ --background \ - --pidfile $(get_pidfile) \ - --exec ${PATH_TO_PYTHON_2} \ + --pidfile "$(get_pidfile)" \ + --exec "${PATH_TO_PYTHON_2}" \ -- \ - ${SICKRAGE_DIR}/start.py \ + "${MEDUSA_DIR}/start.py" \ -d \ - --pidfile $(get_pidfile) \ - --config ${SICKRAGE_CONFDIR}/config.ini \ - --datadir ${SICKRAGE_DATADIR} + --pidfile "$(get_pidfile)" \ + --config "${MEDUSA_CONFDIR}/config.ini" \ + --datadir "${MEDUSA_DATADIR}" eend $? } start_pre() { if [ "$RC_CMD" == "restart" ]; then - local pidfile=$(get_pidfile) - while [ -e ${pidfile} ]; do + local pidfile + pidfile=$(get_pidfile) + while [ -e "${pidfile}" ]; do sleep 1 done fi @@ -70,8 +65,10 @@ start_pre() { } stop() { - local pidfile=$(get_pidfile) - local rc + local pidfile + pidfile=$(get_pidfile) ebegin "Stopping Medusa" + start-stop-daemon --stop --pidfile "$(get_pidfile)" --retry 15 + eend $? 
} diff --git a/static/css/bootstrap-formhelpers.min.css b/static/css/bootstrap-formhelpers.min.css new file mode 100644 index 0000000000..bc83644886 --- /dev/null +++ b/static/css/bootstrap-formhelpers.min.css @@ -0,0 +1,6 @@ +/** +* bootstrap-formhelpers.js v2.3.0 by @vincentlamanna +* Copyright 2013 Vincent Lamanna +* http://www.apache.org/licenses/LICENSE-2.0 +*/ +.bfh-flag-AD,.bfh-flag-AE,.bfh-flag-AF,.bfh-flag-AG,.bfh-flag-AI,.bfh-flag-AL,.bfh-flag-AM,.bfh-flag-AN,.bfh-flag-AO,.bfh-flag-AQ,.bfh-flag-AR,.bfh-flag-AS,.bfh-flag-AT,.bfh-flag-AU,.bfh-flag-AW,.bfh-flag-AX,.bfh-flag-AZ,.bfh-flag-BA,.bfh-flag-BB,.bfh-flag-BD,.bfh-flag-BE,.bfh-flag-BG,.bfh-flag-BH,.bfh-flag-BI,.bfh-flag-BJ,.bfh-flag-BL,.bfh-flag-BM,.bfh-flag-BN,.bfh-flag-BO,.bfh-flag-BR,.bfh-flag-BS,.bfh-flag-BT,.bfh-flag-BW,.bfh-flag-BY,.bfh-flag-BZ,.bfh-flag-CA,.bfh-flag-CD,.bfh-flag-CF,.bfh-flag-CG,.bfh-flag-CH,.bfh-flag-CI,.bfh-flag-CL,.bfh-flag-CM,.bfh-flag-CN,.bfh-flag-CO,.bfh-flag-CR,.bfh-flag-CV,.bfh-flag-CY,.bfh-flag-CZ,.bfh-flag-DJ,.bfh-flag-DK,.bfh-flag-DM,.bfh-flag-DO,.bfh-flag-DZ,.bfh-flag-EC,.bfh-flag-EE,.bfh-flag-EG,.bfh-flag-EH,.bfh-flag-ER,.bfh-flag-ES,.bfh-flag-ET,.bfh-flag-EU,.bfh-flag-FI,.bfh-flag-FJ,.bfh-flag-FK,.bfh-flag-FM,.bfh-flag-FO,.bfh-flag-FR,.bfh-flag-FX,.bfh-flag-GF,.bfh-flag-GP,.bfh-flag-MQ,.bfh-flag-NC,.bfh-flag-PF,.bfh-flag-PM,.bfh-flag-RE,.bfh-flag-TF,.bfh-flag-WF,.bfh-flag-GA,.bfh-flag-GB,.bfh-flag-GD,.bfh-flag-GE,.bfh-flag-GG,.bfh-flag-GH,.bfh-flag-GL,.bfh-flag-GM,.bfh-flag-GN,.bfh-flag-GQ,.bfh-flag-GR,.bfh-flag-GS,.bfh-flag-GT,.bfh-flag-GU,.bfh-flag-GW,.bfh-flag-GY,.bfh-flag-HK,.bfh-flag-HN,.bfh-flag-HR,.bfh-flag-HT,.bfh-flag-HU,.bfh-flag-ID,.bfh-flag-IE,.bfh-flag-IL,.bfh-flag-IM,.bfh-flag-IN,.bfh-flag-IQ,.bfh-flag-IS,.bfh-flag-IT,.bfh-flag-JE,.bfh-flag-JM,.bfh-flag-JO,.bfh-flag-JP,.bfh-flag-KE,.bfh-flag-KG,.bfh-flag-KH,.bfh-flag-KI,.bfh-flag-KM,.bfh-flag-KN,.bfh-flag-KP,.bfh-flag-KR,.bfh-flag-KV,.bfh-flag-KW,.bfh-flag-KY,.bfh-flag-LA,.bfh-flag-LC,.bfh-flag-LK,.bfh-flag-LR,.bfh-flag-LS,.bfh-flag-LT,.bfh-flag-LU,.bfh-flag-LV,.bfh-flag-LY,.bfh-flag-MA,.bfh-flag-ME,.bfh-flag-MG,.bfh-flag-MH,.bfh-flag-ML,.bfh-flag-MM,.bfh-flag-MP,.bfh-flag-MR,.bfh-flag-MS,.bfh-flag-MT,.bfh-flag-MU,.bfh-flag-MV,.bfh-flag-MW,.bfh-flag-MZ,.bfh-flag-NA,.bfh-flag-NE,.bfh-flag-NF,.bfh-flag-NG,.bfh-flag-NI,.bfh-flag-NL,.bfh-flag-NO,.bfh-flag-NP,.bfh-flag-NR,.bfh-flag-NZ,.bfh-flag-OM,.bfh-flag-PA,.bfh-flag-PE,.bfh-flag-PG,.bfh-flag-PH,.bfh-flag-PK,.bfh-flag-PL,.bfh-flag-PN,.bfh-flag-PS,.bfh-flag-PT,.bfh-flag-PW,.bfh-flag-PY,.bfh-flag-QA,.bfh-flag-RS,.bfh-flag-RU,.bfh-flag-RW,.bfh-flag-SA,.bfh-flag-SB,.bfh-flag-SC,.bfh-flag-SD,.bfh-flag-SE,.bfh-flag-SG,.bfh-flag-SH,.bfh-flag-SI,.bfh-flag-SK,.bfh-flag-SM,.bfh-flag-SN,.bfh-flag-SO,.bfh-flag-SR,.bfh-flag-SS,.bfh-flag-ST,.bfh-flag-SV,.bfh-flag-SY,.bfh-flag-SZ,.bfh-flag-TC,.bfh-flag-TD,.bfh-flag-TG,.bfh-flag-TH,.bfh-flag-TJ,.bfh-flag-TM,.bfh-flag-TN,.bfh-flag-TP,.bfh-flag-TR,.bfh-flag-TT,.bfh-flag-TV,.bfh-flag-TW,.bfh-flag-TZ,.bfh-flag-UA,.bfh-flag-UG,.bfh-flag-US,.bfh-flag-UY,.bfh-flag-UZ,.bfh-flag-VC,.bfh-flag-VE,.bfh-flag-VG,.bfh-flag-VI,.bfh-flag-VN,.bfh-flag-VU,.bfh-flag-WS,.bfh-flag-YE,.bfh-flag-ZA,.bfh-flag-ZM,.bfh-flag-BF,.bfh-flag-CU,.bfh-flag-DE,.bfh-flag-IR,.bfh-flag-KZ,.bfh-flag-LB,.bfh-flag-LI,.bfh-flag-MC,.bfh-flag-MD,.bfh-flag-MK,.bfh-flag-MN,.bfh-flag-MO,.bfh-flag-MX,.bfh-flag-MY,.bfh-flag-PR,.bfh-flag-RO,.bfh-flag-SL,.bfh-flag-TO,.bfh-flag-VA,.bfh-flag-ZW{width:16px;height:14px;background:url(../img/bootstrap-formhelpers-countries.flags.png) 
no-repeat}.bfh-flag-AD:empty,.bfh-flag-AE:empty,.bfh-flag-AF:empty,.bfh-flag-AG:empty,.bfh-flag-AI:empty,.bfh-flag-AL:empty,.bfh-flag-AM:empty,.bfh-flag-AN:empty,.bfh-flag-AO:empty,.bfh-flag-AQ:empty,.bfh-flag-AR:empty,.bfh-flag-AS:empty,.bfh-flag-AT:empty,.bfh-flag-AU:empty,.bfh-flag-AW:empty,.bfh-flag-AX:empty,.bfh-flag-AZ:empty,.bfh-flag-BA:empty,.bfh-flag-BB:empty,.bfh-flag-BD:empty,.bfh-flag-BE:empty,.bfh-flag-BG:empty,.bfh-flag-BH:empty,.bfh-flag-BI:empty,.bfh-flag-BJ:empty,.bfh-flag-BL:empty,.bfh-flag-BM:empty,.bfh-flag-BN:empty,.bfh-flag-BO:empty,.bfh-flag-BR:empty,.bfh-flag-BS:empty,.bfh-flag-BT:empty,.bfh-flag-BW:empty,.bfh-flag-BY:empty,.bfh-flag-BZ:empty,.bfh-flag-CA:empty,.bfh-flag-CD:empty,.bfh-flag-CF:empty,.bfh-flag-CG:empty,.bfh-flag-CH:empty,.bfh-flag-CI:empty,.bfh-flag-CL:empty,.bfh-flag-CM:empty,.bfh-flag-CN:empty,.bfh-flag-CO:empty,.bfh-flag-CR:empty,.bfh-flag-CV:empty,.bfh-flag-CY:empty,.bfh-flag-CZ:empty,.bfh-flag-DJ:empty,.bfh-flag-DK:empty,.bfh-flag-DM:empty,.bfh-flag-DO:empty,.bfh-flag-DZ:empty,.bfh-flag-EC:empty,.bfh-flag-EE:empty,.bfh-flag-EG:empty,.bfh-flag-EH:empty,.bfh-flag-ER:empty,.bfh-flag-ES:empty,.bfh-flag-ET:empty,.bfh-flag-EU:empty,.bfh-flag-FI:empty,.bfh-flag-FJ:empty,.bfh-flag-FK:empty,.bfh-flag-FM:empty,.bfh-flag-FO:empty,.bfh-flag-FR:empty,.bfh-flag-FX:empty,.bfh-flag-GF:empty,.bfh-flag-GP:empty,.bfh-flag-MQ:empty,.bfh-flag-NC:empty,.bfh-flag-PF:empty,.bfh-flag-PM:empty,.bfh-flag-RE:empty,.bfh-flag-TF:empty,.bfh-flag-WF:empty,.bfh-flag-GA:empty,.bfh-flag-GB:empty,.bfh-flag-GD:empty,.bfh-flag-GE:empty,.bfh-flag-GG:empty,.bfh-flag-GH:empty,.bfh-flag-GL:empty,.bfh-flag-GM:empty,.bfh-flag-GN:empty,.bfh-flag-GQ:empty,.bfh-flag-GR:empty,.bfh-flag-GS:empty,.bfh-flag-GT:empty,.bfh-flag-GU:empty,.bfh-flag-GW:empty,.bfh-flag-GY:empty,.bfh-flag-HK:empty,.bfh-flag-HN:empty,.bfh-flag-HR:empty,.bfh-flag-HT:empty,.bfh-flag-HU:empty,.bfh-flag-ID:empty,.bfh-flag-IE:empty,.bfh-flag-IL:empty,.bfh-flag-IM:empty,.bfh-flag-IN:empty,.bfh-flag-IQ:empty,.bfh-flag-IS:empty,.bfh-flag-IT:empty,.bfh-flag-JE:empty,.bfh-flag-JM:empty,.bfh-flag-JO:empty,.bfh-flag-JP:empty,.bfh-flag-KE:empty,.bfh-flag-KG:empty,.bfh-flag-KH:empty,.bfh-flag-KI:empty,.bfh-flag-KM:empty,.bfh-flag-KN:empty,.bfh-flag-KP:empty,.bfh-flag-KR:empty,.bfh-flag-KV:empty,.bfh-flag-KW:empty,.bfh-flag-KY:empty,.bfh-flag-LA:empty,.bfh-flag-LC:empty,.bfh-flag-LK:empty,.bfh-flag-LR:empty,.bfh-flag-LS:empty,.bfh-flag-LT:empty,.bfh-flag-LU:empty,.bfh-flag-LV:empty,.bfh-flag-LY:empty,.bfh-flag-MA:empty,.bfh-flag-ME:empty,.bfh-flag-MG:empty,.bfh-flag-MH:empty,.bfh-flag-ML:empty,.bfh-flag-MM:empty,.bfh-flag-MP:empty,.bfh-flag-MR:empty,.bfh-flag-MS:empty,.bfh-flag-MT:empty,.bfh-flag-MU:empty,.bfh-flag-MV:empty,.bfh-flag-MW:empty,.bfh-flag-MZ:empty,.bfh-flag-NA:empty,.bfh-flag-NE:empty,.bfh-flag-NF:empty,.bfh-flag-NG:empty,.bfh-flag-NI:empty,.bfh-flag-NL:empty,.bfh-flag-NO:empty,.bfh-flag-NP:empty,.bfh-flag-NR:empty,.bfh-flag-NZ:empty,.bfh-flag-OM:empty,.bfh-flag-PA:empty,.bfh-flag-PE:empty,.bfh-flag-PG:empty,.bfh-flag-PH:empty,.bfh-flag-PK:empty,.bfh-flag-PL:empty,.bfh-flag-PN:empty,.bfh-flag-PS:empty,.bfh-flag-PT:empty,.bfh-flag-PW:empty,.bfh-flag-PY:empty,.bfh-flag-QA:empty,.bfh-flag-RS:empty,.bfh-flag-RU:empty,.bfh-flag-RW:empty,.bfh-flag-SA:empty,.bfh-flag-SB:empty,.bfh-flag-SC:empty,.bfh-flag-SD:empty,.bfh-flag-SE:empty,.bfh-flag-SG:empty,.bfh-flag-SH:empty,.bfh-flag-SI:empty,.bfh-flag-SK:empty,.bfh-flag-SM:empty,.bfh-flag-SN:empty,.bfh-flag-SO:empty,.bfh-flag-SR:empty,.bfh-flag-SS:empty,.bfh-flag-ST:empty,.bfh-flag-S
V:empty,.bfh-flag-SY:empty,.bfh-flag-SZ:empty,.bfh-flag-TC:empty,.bfh-flag-TD:empty,.bfh-flag-TG:empty,.bfh-flag-TH:empty,.bfh-flag-TJ:empty,.bfh-flag-TM:empty,.bfh-flag-TN:empty,.bfh-flag-TP:empty,.bfh-flag-TR:empty,.bfh-flag-TT:empty,.bfh-flag-TV:empty,.bfh-flag-TW:empty,.bfh-flag-TZ:empty,.bfh-flag-UA:empty,.bfh-flag-UG:empty,.bfh-flag-US:empty,.bfh-flag-UY:empty,.bfh-flag-UZ:empty,.bfh-flag-VC:empty,.bfh-flag-VE:empty,.bfh-flag-VG:empty,.bfh-flag-VI:empty,.bfh-flag-VN:empty,.bfh-flag-VU:empty,.bfh-flag-WS:empty,.bfh-flag-YE:empty,.bfh-flag-ZA:empty,.bfh-flag-ZM:empty,.bfh-flag-BF:empty,.bfh-flag-CU:empty,.bfh-flag-DE:empty,.bfh-flag-IR:empty,.bfh-flag-KZ:empty,.bfh-flag-LB:empty,.bfh-flag-LI:empty,.bfh-flag-MC:empty,.bfh-flag-MD:empty,.bfh-flag-MK:empty,.bfh-flag-MN:empty,.bfh-flag-MO:empty,.bfh-flag-MX:empty,.bfh-flag-MY:empty,.bfh-flag-PR:empty,.bfh-flag-RO:empty,.bfh-flag-SL:empty,.bfh-flag-TO:empty,.bfh-flag-VA:empty,.bfh-flag-ZW:empty{width:16px}.bfh-flag-AD,.bfh-flag-AE,.bfh-flag-AF,.bfh-flag-AG,.bfh-flag-AI,.bfh-flag-AL,.bfh-flag-AM,.bfh-flag-AN,.bfh-flag-AO,.bfh-flag-AQ,.bfh-flag-AR,.bfh-flag-AS,.bfh-flag-AT,.bfh-flag-AU,.bfh-flag-AW,.bfh-flag-AX,.bfh-flag-AZ,.bfh-flag-BA,.bfh-flag-BB,.bfh-flag-BD,.bfh-flag-BE,.bfh-flag-BG,.bfh-flag-BH,.bfh-flag-BI,.bfh-flag-BJ,.bfh-flag-BL,.bfh-flag-BM,.bfh-flag-BN,.bfh-flag-BO,.bfh-flag-BR,.bfh-flag-BS,.bfh-flag-BT,.bfh-flag-BW,.bfh-flag-BY,.bfh-flag-BZ,.bfh-flag-CA,.bfh-flag-CD,.bfh-flag-CF,.bfh-flag-CG,.bfh-flag-CH,.bfh-flag-CI,.bfh-flag-CL,.bfh-flag-CM,.bfh-flag-CN,.bfh-flag-CO,.bfh-flag-CR,.bfh-flag-CV,.bfh-flag-CY,.bfh-flag-CZ,.bfh-flag-DJ,.bfh-flag-DK,.bfh-flag-DM,.bfh-flag-DO,.bfh-flag-DZ,.bfh-flag-EC,.bfh-flag-EE,.bfh-flag-EG,.bfh-flag-EH,.bfh-flag-ER,.bfh-flag-ES,.bfh-flag-ET,.bfh-flag-EU,.bfh-flag-FI,.bfh-flag-FJ,.bfh-flag-FK,.bfh-flag-FM,.bfh-flag-FO,.bfh-flag-FR,.bfh-flag-FX,.bfh-flag-GF,.bfh-flag-GP,.bfh-flag-MQ,.bfh-flag-NC,.bfh-flag-PF,.bfh-flag-PM,.bfh-flag-RE,.bfh-flag-TF,.bfh-flag-WF,.bfh-flag-GA,.bfh-flag-GB,.bfh-flag-GD,.bfh-flag-GE,.bfh-flag-GG,.bfh-flag-GH,.bfh-flag-GL,.bfh-flag-GM,.bfh-flag-GN,.bfh-flag-GQ,.bfh-flag-GR,.bfh-flag-GS,.bfh-flag-GT,.bfh-flag-GU,.bfh-flag-GW,.bfh-flag-GY,.bfh-flag-HK,.bfh-flag-HN,.bfh-flag-HR,.bfh-flag-HT,.bfh-flag-HU,.bfh-flag-ID,.bfh-flag-IE,.bfh-flag-IL,.bfh-flag-IM,.bfh-flag-IN,.bfh-flag-IQ,.bfh-flag-IS,.bfh-flag-IT,.bfh-flag-JE,.bfh-flag-JM,.bfh-flag-JO,.bfh-flag-JP,.bfh-flag-KE,.bfh-flag-KG,.bfh-flag-KH,.bfh-flag-KI,.bfh-flag-KM,.bfh-flag-KN,.bfh-flag-KP,.bfh-flag-KR,.bfh-flag-KV,.bfh-flag-KW,.bfh-flag-KY,.bfh-flag-LA,.bfh-flag-LC,.bfh-flag-LK,.bfh-flag-LR,.bfh-flag-LS,.bfh-flag-LT,.bfh-flag-LU,.bfh-flag-LV,.bfh-flag-LY,.bfh-flag-MA,.bfh-flag-ME,.bfh-flag-MG,.bfh-flag-MH,.bfh-flag-ML,.bfh-flag-MM,.bfh-flag-MP,.bfh-flag-MR,.bfh-flag-MS,.bfh-flag-MT,.bfh-flag-MU,.bfh-flag-MV,.bfh-flag-MW,.bfh-flag-MZ,.bfh-flag-NA,.bfh-flag-NE,.bfh-flag-NF,.bfh-flag-NG,.bfh-flag-NI,.bfh-flag-NL,.bfh-flag-NO,.bfh-flag-NP,.bfh-flag-NR,.bfh-flag-NZ,.bfh-flag-OM,.bfh-flag-PA,.bfh-flag-PE,.bfh-flag-PG,.bfh-flag-PH,.bfh-flag-PK,.bfh-flag-PL,.bfh-flag-PN,.bfh-flag-PS,.bfh-flag-PT,.bfh-flag-PW,.bfh-flag-PY,.bfh-flag-QA,.bfh-flag-RS,.bfh-flag-RU,.bfh-flag-RW,.bfh-flag-SA,.bfh-flag-SB,.bfh-flag-SC,.bfh-flag-SD,.bfh-flag-SE,.bfh-flag-SG,.bfh-flag-SH,.bfh-flag-SI,.bfh-flag-SK,.bfh-flag-SM,.bfh-flag-SN,.bfh-flag-SO,.bfh-flag-SR,.bfh-flag-SS,.bfh-flag-ST,.bfh-flag-SV,.bfh-flag-SY,.bfh-flag-SZ,.bfh-flag-TC,.bfh-flag-TD,.bfh-flag-TG,.bfh-flag-TH,.bfh-flag-TJ,.bfh-flag-TM,.bfh-flag-TN,.bfh-flag-TP,.bfh-flag-TR,.bfh-flag-TT
,.bfh-flag-TV,.bfh-flag-TW,.bfh-flag-TZ,.bfh-flag-UA,.bfh-flag-UG,.bfh-flag-US,.bfh-flag-UY,.bfh-flag-UZ,.bfh-flag-VC,.bfh-flag-VE,.bfh-flag-VG,.bfh-flag-VI,.bfh-flag-VN,.bfh-flag-VU,.bfh-flag-WS,.bfh-flag-YE,.bfh-flag-ZA,.bfh-flag-ZM,.bfh-flag-BF,.bfh-flag-CU,.bfh-flag-DE,.bfh-flag-IR,.bfh-flag-KZ,.bfh-flag-LB,.bfh-flag-LI,.bfh-flag-MC,.bfh-flag-MD,.bfh-flag-MK,.bfh-flag-MN,.bfh-flag-MO,.bfh-flag-MX,.bfh-flag-MY,.bfh-flag-PR,.bfh-flag-RO,.bfh-flag-SL,.bfh-flag-TO,.bfh-flag-VA,.bfh-flag-ZW,.bfh-flag-EUR,.bfh-flag-XCD{margin-right:5px}.bfh-flag-AD{background-position:-1921px 0}.bfh-flag-AE{background-position:-1904px 0}.bfh-flag-AF{background-position:-3689px 0}.bfh-flag-AG{background-position:-34px 0}.bfh-flag-AI{background-position:-51px 0}.bfh-flag-AL{background-position:-68px 0}.bfh-flag-AM{background-position:-85px 0}.bfh-flag-AN{background-position:-102px 0}.bfh-flag-AO{background-position:-119px 0}.bfh-flag-AQ{background-position:-136px 0}.bfh-flag-AR{background-position:-153px 0}.bfh-flag-AS{background-position:-170px 0}.bfh-flag-AT{background-position:-187px 0}.bfh-flag-AU{background-position:-204px 0}.bfh-flag-AW{background-position:-221px 0}.bfh-flag-AX{background-position:-238px 0}.bfh-flag-AZ{background-position:-255px 0}.bfh-flag-BA{background-position:-272px 0}.bfh-flag-BB{background-position:-289px 0}.bfh-flag-BD{background-position:-306px 0}.bfh-flag-BE{background-position:-323px 0}.bfh-flag-BG{background-position:-340px 0}.bfh-flag-BH{background-position:-357px 0}.bfh-flag-BI{background-position:-374px 0}.bfh-flag-BJ{background-position:-391px 0}.bfh-flag-BL{background-position:-408px 0}.bfh-flag-BM{background-position:-425px 0}.bfh-flag-BN{background-position:-442px 0}.bfh-flag-BO{background-position:-459px 0}.bfh-flag-BR{background-position:-476px 0}.bfh-flag-BS{background-position:-493px 0}.bfh-flag-BT{background-position:-510px 0}.bfh-flag-BW{background-position:-527px 0}.bfh-flag-BY{background-position:-544px 0}.bfh-flag-BZ{background-position:-561px 0}.bfh-flag-CA{background-position:-578px 0}.bfh-flag-CD{background-position:-595px 0}.bfh-flag-CF{background-position:-612px 0}.bfh-flag-CG{background-position:-629px 0}.bfh-flag-CH{background-position:-646px 0}.bfh-flag-CI{background-position:-663px 0}.bfh-flag-CL{background-position:-680px 0}.bfh-flag-CM{background-position:-697px 0}.bfh-flag-CN{background-position:-714px 0}.bfh-flag-CO{background-position:-731px 0}.bfh-flag-CR{background-position:-748px 0}.bfh-flag-CV{background-position:-765px 0}.bfh-flag-CY{background-position:-782px 0}.bfh-flag-CZ{background-position:-799px 0}.bfh-flag-DJ{background-position:-816px 0}.bfh-flag-DK{background-position:-833px 0}.bfh-flag-DM{background-position:-850px 0}.bfh-flag-DO{background-position:-867px 0}.bfh-flag-DZ{background-position:-884px 0}.bfh-flag-EC{background-position:-901px 0}.bfh-flag-EE{background-position:-918px 0}.bfh-flag-EG{background-position:-935px 0}.bfh-flag-EH{background-position:-952px 0}.bfh-flag-ER{background-position:-969px 0}.bfh-flag-ES{background-position:-986px 0}.bfh-flag-ET{background-position:-1003px 0}.bfh-flag-EU{background-position:-1020px 0}.bfh-flag-FI{background-position:-1037px 0}.bfh-flag-FJ{background-position:-1054px 0}.bfh-flag-FK{background-position:-1071px 0}.bfh-flag-FM{background-position:-1088px 0}.bfh-flag-FO{background-position:-1105px 0}.bfh-flag-FR,.bfh-flag-FX,.bfh-flag-GF,.bfh-flag-GP,.bfh-flag-MQ,.bfh-flag-NC,.bfh-flag-PF,.bfh-flag-PM,.bfh-flag-RE,.bfh-flag-TF,.bfh-flag-WF{background-position:-1122px 
0}.bfh-flag-GA{background-position:-1139px 0}.bfh-flag-GB{background-position:-1156px 0}.bfh-flag-GD{background-position:-1173px 0}.bfh-flag-GE{background-position:-1190px 0}.bfh-flag-GG{background-position:-1207px 0}.bfh-flag-GH{background-position:-1224px 0}.bfh-flag-GL{background-position:-1241px 0}.bfh-flag-GM{background-position:-1258px 0}.bfh-flag-GN{background-position:-1275px 0}.bfh-flag-GQ{background-position:-1292px 0}.bfh-flag-GR{background-position:-1309px 0}.bfh-flag-GS{background-position:-1326px 0}.bfh-flag-GT{background-position:-1343px 0}.bfh-flag-GU{background-position:-1360px 0}.bfh-flag-GW{background-position:-1377px 0}.bfh-flag-GY{background-position:-1394px 0}.bfh-flag-HK{background-position:-1411px 0}.bfh-flag-HN{background-position:-1428px 0}.bfh-flag-HR{background-position:-1445px 0}.bfh-flag-HT{background-position:-1462px 0}.bfh-flag-HU{background-position:-1479px 0}.bfh-flag-ID{background-position:-1496px 0}.bfh-flag-IE{background-position:-1513px 0}.bfh-flag-IL{background-position:-1530px 0}.bfh-flag-IM{background-position:-1547px 0}.bfh-flag-IN{background-position:-1564px 0}.bfh-flag-IQ{background-position:-1581px 0}.bfh-flag-IS{background-position:-1598px 0}.bfh-flag-IT{background-position:-1615px 0}.bfh-flag-JE{background-position:-1632px 0}.bfh-flag-JM{background-position:-1649px 0}.bfh-flag-JO{background-position:-1666px 0}.bfh-flag-JP{background-position:-1683px 0}.bfh-flag-KE{background-position:-1700px 0}.bfh-flag-KG{background-position:-1717px 0}.bfh-flag-KH{background-position:-1734px 0}.bfh-flag-KI{background-position:-1751px 0}.bfh-flag-KM{background-position:-1768px 0}.bfh-flag-KN{background-position:-1785px 0}.bfh-flag-KP{background-position:-1802px 0}.bfh-flag-KR{background-position:-1819px 0}.bfh-flag-KV{background-position:-1836px 0}.bfh-flag-KW{background-position:-1853px 0}.bfh-flag-KY{background-position:-1870px 0}.bfh-flag-LA{background-position:-1887px 0}.bfh-flag-LC{background-position:0 0}.bfh-flag-LK{background-position:-17px 0}.bfh-flag-LR{background-position:-1938px 0}.bfh-flag-LS{background-position:-1955px 0}.bfh-flag-LT{background-position:-1972px 0}.bfh-flag-LU{background-position:-1989px 0}.bfh-flag-LV{background-position:-2006px 0}.bfh-flag-LY{background-position:-2023px 0}.bfh-flag-MA{background-position:-2040px 0}.bfh-flag-ME{background-position:-2057px 0}.bfh-flag-MG{background-position:-2074px 0}.bfh-flag-MH{background-position:-2091px 0}.bfh-flag-ML{background-position:-2108px 0}.bfh-flag-MM{background-position:-2125px 0}.bfh-flag-MP{background-position:-2142px 0}.bfh-flag-MR{background-position:-2159px 0}.bfh-flag-MS{background-position:-2176px 0}.bfh-flag-MT{background-position:-2193px 0}.bfh-flag-MU{background-position:-2210px 0}.bfh-flag-MV{background-position:-2227px 0}.bfh-flag-MW{background-position:-2244px 0}.bfh-flag-MZ{background-position:-2261px 0}.bfh-flag-NA{background-position:-2278px 0}.bfh-flag-NE{background-position:-2295px 0}.bfh-flag-NF{background-position:-2312px 0}.bfh-flag-NG{background-position:-2329px 0}.bfh-flag-NI{background-position:-2346px 0}.bfh-flag-NL{background-position:-2363px 0}.bfh-flag-NO{background-position:-2380px 0}.bfh-flag-NP{background-position:-2397px 0}.bfh-flag-NR{background-position:-2414px 0}.bfh-flag-NZ{background-position:-2431px 0}.bfh-flag-OM{background-position:-2448px 0}.bfh-flag-PA{background-position:-2465px 0}.bfh-flag-PE{background-position:-2482px 0}.bfh-flag-PG{background-position:-2499px 0}.bfh-flag-PH{background-position:-2516px 
0}.bfh-flag-PK{background-position:-2533px 0}.bfh-flag-PL{background-position:-2550px 0}.bfh-flag-PN{background-position:-2567px 0}.bfh-flag-PS{background-position:-2584px 0}.bfh-flag-PT{background-position:-2601px 0}.bfh-flag-PW{background-position:-2618px 0}.bfh-flag-PY{background-position:-2635px 0}.bfh-flag-QA{background-position:-2652px 0}.bfh-flag-RS{background-position:-2669px 0}.bfh-flag-RU{background-position:-2686px 0}.bfh-flag-RW{background-position:-2703px 0}.bfh-flag-SA{background-position:-2720px 0}.bfh-flag-SB{background-position:-2737px 0}.bfh-flag-SC{background-position:-2754px 0}.bfh-flag-SD{background-position:-2771px 0}.bfh-flag-SE{background-position:-2788px 0}.bfh-flag-SG{background-position:-2805px 0}.bfh-flag-SH{background-position:-2822px 0}.bfh-flag-SI{background-position:-2839px 0}.bfh-flag-SK{background-position:-2856px 0}.bfh-flag-SM{background-position:-2873px 0}.bfh-flag-SN{background-position:-2890px 0}.bfh-flag-SO{background-position:-2907px 0}.bfh-flag-SR{background-position:-2924px 0}.bfh-flag-SS{background-position:-2941px 0}.bfh-flag-ST{background-position:-2958px 0}.bfh-flag-SV{background-position:-2975px 0}.bfh-flag-SY{background-position:-2992px 0}.bfh-flag-SZ{background-position:-3009px 0}.bfh-flag-TC{background-position:-3026px 0}.bfh-flag-TD{background-position:-3043px 0}.bfh-flag-TG{background-position:-3060px 0}.bfh-flag-TH{background-position:-3077px 0}.bfh-flag-TJ{background-position:-3094px 0}.bfh-flag-TM{background-position:-3111px 0}.bfh-flag-TN{background-position:-3128px 0}.bfh-flag-TP{background-position:-3145px 0}.bfh-flag-TR{background-position:-3162px 0}.bfh-flag-TT{background-position:-3179px 0}.bfh-flag-TV{background-position:-3196px 0}.bfh-flag-TW{background-position:-3213px 0}.bfh-flag-TZ{background-position:-3230px 0}.bfh-flag-UA{background-position:-3247px 0}.bfh-flag-UG{background-position:-3264px 0}.bfh-flag-US{background-position:-3281px 0}.bfh-flag-UY{background-position:-3298px 0}.bfh-flag-UZ{background-position:-3315px 0}.bfh-flag-VC{background-position:-3332px 0}.bfh-flag-VE{background-position:-3349px 0}.bfh-flag-VG{background-position:-3366px 0}.bfh-flag-VI{background-position:-3383px 0}.bfh-flag-VN{background-position:-3400px 0}.bfh-flag-VU{background-position:-3417px 0}.bfh-flag-WS{background-position:-3434px 0}.bfh-flag-YE{background-position:-3451px 0}.bfh-flag-ZA{background-position:-3468px 0}.bfh-flag-ZM{background-position:-3485px 0}.bfh-flag-BF{background-position:-3502px 0}.bfh-flag-CU{background-position:-3519px 0}.bfh-flag-DE{background-position:-3536px 0}.bfh-flag-IR{background-position:-3553px 0}.bfh-flag-KZ{background-position:-3570px 0}.bfh-flag-LB{background-position:-3587px 0}.bfh-flag-LI{background-position:-3604px 0}.bfh-flag-MC{background-position:-3621px 0}.bfh-flag-MD{background-position:-3638px 0}.bfh-flag-MK{background-position:-3655px 0}.bfh-flag-MN{background-position:-3672px 0}.bfh-flag-MO{background-position:-3706px 0}.bfh-flag-MX{background-position:-3723px 0}.bfh-flag-MY{background-position:-3740px 0}.bfh-flag-PR{background-position:-3757px 0}.bfh-flag-RO{background-position:-3774px 0}.bfh-flag-SL{background-position:-3791px 0}.bfh-flag-TO{background-position:-3808px 0}.bfh-flag-VA{background-position:-3825px 0}.bfh-flag-ZW{background-position:-3842px 0}.bfh-flag-EUR{background:url(../img/eu.png) no-repeat}.bfh-flag-XCD{background:url(../img/xcd.png) 
no-repeat}.bfh-flag-AUD,.bfh-flag-CHF,.bfh-flag-DKK,.bfh-flag-EUR,.bfh-flag-XAF,.bfh-flag-XCD,.bfh-flag-XOF,.bfh-flag-XPF,.bfh-flag-ZAR{width:16px;height:14px;background:url(../img/bootstrap-formhelpers-currencies.flags.png) no-repeat}.bfh-flag-AUD:empty,.bfh-flag-CHF:empty,.bfh-flag-DKK:empty,.bfh-flag-EUR:empty,.bfh-flag-XAF:empty,.bfh-flag-XCD:empty,.bfh-flag-XOF:empty,.bfh-flag-XPF:empty,.bfh-flag-ZAR:empty{width:16px}.bfh-flag-AUD,.bfh-flag-CHF,.bfh-flag-DKK,.bfh-flag-EUR,.bfh-flag-XAF,.bfh-flag-XCD,.bfh-flag-XOF,.bfh-flag-XPF,.bfh-flag-ZAR{margin-right:5px}.bfh-flag-AUD{background-position:-32px 0}.bfh-flag-CHF{background-position:-224px 0}.bfh-flag-DKK{background-position:-64px -16px}.bfh-flag-EUR{background-position:-96px -16px}.bfh-flag-XAF{background-position:-160px -80px}.bfh-flag-XCD{background-position:-176px -80px}.bfh-flag-XOF{background-position:-192px -80px}.bfh-flag-XPF{background-position:-208px -80px}.bfh-flag-ZAR{background-position:-224px -80px}.bfh-selectbox{position:relative}.bfh-selectbox .bfh-selectbox-toggle{display:inline-block;padding:6px 24px 6px 12px;text-decoration:none}.bfh-selectbox .bfh-selectbox-toggle:focus{outline:0}.bfh-selectbox .bfh-selectbox-toggle .bfh-selectbox-option{display:inline-block;float:left;width:100%;height:20px;overflow:hidden;text-overflow:ellipsis}.bfh-selectbox .bfh-selectbox-toggle .selectbox-caret{float:right;margin-top:8px;margin-right:-16px;margin-left:-10px}.bfh-selectbox .bfh-selectbox-options{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:90px;padding:5px 0;margin:-1px 0 0;font-size:14px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175);background-clip:padding-box}.bfh-selectbox .bfh-selectbox-options.pull-right{right:0;left:auto}.bfh-selectbox .bfh-selectbox-options .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.bfh-selectbox .bfh-selectbox-options .bfh-selectbox-filter-container{width:100%;padding:5px}.bfh-selectbox .bfh-selectbox-options ul{max-width:500px;max-height:200px;padding:0;margin:5px 0 0 0;overflow-x:hidden;overflow-y:auto;list-style:none}.bfh-selectbox .bfh-selectbox-options ul li>a{display:block;width:100%;min-height:26px;padding:3px 20px;overflow-x:hidden;clear:both;font-weight:normal;line-height:1.428571429;color:#333;text-overflow:ellipsis;white-space:nowrap}.bfh-selectbox .bfh-selectbox-options ul li>a:hover,.bfh-selectbox .bfh-selectbox-options ul li>a:focus{color:#262626;text-decoration:none;background-color:#f5f5f5}.bfh-selectbox .bfh-selectbox-options ul .bfh-selectbox-options-header{display:block;padding:3px 20px;font-size:12px;line-height:1.428571429;color:#999}.bfh-selectbox .bfh-selectbox-options ul .disabled>a{color:#999}.bfh-selectbox .bfh-selectbox-options ul .disabled>a:hover,.bfh-selectbox .bfh-selectbox-options ul .disabled>a:focus{color:#999;text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.bfh-selectbox.open>.bfh-selectbox-options{display:block}.bfh-selectbox.open a{outline:0}.pull-right>.bfh-selectbox-options{right:0;left:auto}.bfh-selectbox-up .caret,.navbar-fixed-bottom .bfh-selectbox .caret{border-top:0 dotted;border-bottom:4px solid #000;content:""}.bfh-selectbox-up .bfh-selectbox-options,.navbar-fixed-bottom .bfh-selectbox 
.bfh-selectbox-options{top:auto;bottom:100%;margin-bottom:1px}@media(min-width:768px){.navbar-right .bfh-selectbox-options{right:0;left:auto}}.bfh-googlefonts .bfh-selectbox-options a{width:230px;height:30px;text-indent:-9999px;background-image:url(../img/bootstrap-formhelpers-googlefonts.png)}.bfh-googlefonts .bfh-selectbox-options a:focus{background-color:transparent;background-repeat:no-repeat;outline:0;filter:none}.bfh-googlefonts .bfh-selectbox-options .active>a,.bfh-googlefonts .bfh-selectbox-options .active>a:hover{background-color:transparent;background-image:url(../img/bootstrap-formhelpers-googlefonts.png);background-repeat:no-repeat;outline:0;filter:none}.bfh-datepicker-calendar{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:296px}.bfh-datepicker-calendar>table.calendar{width:376px;background:#fff}.bfh-datepicker-calendar>table.calendar .months-header>th{font-size:12px;text-align:center}.bfh-datepicker-calendar>table.calendar .months-header>th.month>span{display:inline-block;width:100px}.bfh-datepicker-calendar>table.calendar .months-header>th.year>span{display:inline-block;width:50px}.bfh-datepicker-calendar>table.calendar .days-header>th{width:30px;font-size:11px;line-height:12px;text-align:center}.bfh-datepicker-calendar>table.calendar>tbody>tr>td{width:30px;font-size:11px;line-height:12px;text-align:center}.bfh-datepicker-calendar>table.calendar>tbody>tr>td.today{color:#fff;background-color:#428bca}.bfh-datepicker-calendar>table.calendar>tbody>tr>td.off{color:#999}.bfh-datepicker-calendar>table.calendar>tbody>tr>td:not(.off):hover{color:#262626;cursor:pointer;background-color:#f5f5f5}.bfh-datepicker{position:relative}.bfh-datepicker-toggle{*margin-bottom:-3px}.bfh-datepicker-toggle>input[readonly]{cursor:inherit;background-color:inherit}.open>.bfh-datepicker-calendar{display:block}.bfh-timepicker-popover{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:100px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175);background-clip:padding-box}.bfh-timepicker-popover>table{width:180px;margin:0}.bfh-timepicker-popover>table>tbody>tr>td{text-align:center;border:0}.bfh-timepicker-popover>table>tbody>tr>td.separator{font-size:20px;font-weight:bold;line-height:28px}.bfh-timepicker-popover>table>tbody>tr>td>div>input{width:42px!important;text-align:center}.bfh-timepicker{position:relative}.bfh-timepicker-toggle{*margin-bottom:-3px}.bfh-timepicker-toggle>input[readonly]{cursor:inherit;background-color:inherit}.open>.bfh-timepicker-popover{display:block}.bfh-slider{height:20px;margin-top:8px;margin-bottom:23px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-o-user-select:none}.bfh-slider>.bfh-slider-handle{position:absolute;width:20px;height:34px;margin-top:-7px;cursor:col-resize;background:#efefef;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px}.bfh-slider>.bfh-slider-handle>.bfh-slider-value{position:absolute;width:48px;height:20px;margin-top:5px;margin-left:-15px;line-height:20px;text-align:center;cursor:col-resize;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px}.bfh-slider>.bfh-slider-handle>.bfh-slider-value .disabled{color:#999}.bfh-slider.disabled 
.bfh-slider-value{color:#999}.bfh-colorpicker-popover{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:100px;padding:20px;cursor:default;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175);background-clip:padding-box}.bfh-colorpicker-popover>canvas{width:384px;height:256px}.bfh-colorpicker{position:relative}.bfh-colorpicker-toggle{*margin-bottom:-3px}.bfh-colorpicker-toggle>input[readonly]{cursor:inherit;background-color:inherit}.bfh-colorpicker-toggle .bfh-colorpicker-icon{display:block;width:16px;height:16px}.open>.bfh-colorpicker-popover{display:block}.input-group>.bfh-number{border-right:0}.input-group>.bfh-number-btn:hover{background-color:#ccc} \ No newline at end of file diff --git a/static/css/browser.css b/static/css/browser.css index 56cd697714..a075d4991a 100644 --- a/static/css/browser.css +++ b/static/css/browser.css @@ -32,8 +32,6 @@ .ui-autocomplete { max-height: 180px; overflow-y: auto; - /* prevent horizontal scrollbar */ - overflow-x: hidden; - /* add padding to account for vertical scrollbar */ - padding-right: 20px; + overflow-x: hidden; /* prevent horizontal scrollbar */ + padding-right: 20px; /* add padding to account for vertical scrollbar */ } diff --git a/static/css/country-flags.css b/static/css/country-flags.css index 1f9a424964..bef42c5159 100644 --- a/static/css/country-flags.css +++ b/static/css/country-flags.css @@ -1,508 +1,256 @@ .country-flag { - width: 16px; - height: 11px; - background:url(../images/country-flags.png) no-repeat; -} - -.country-flag.flag-ad {background-position: -16px 0; -} -.country-flag.flag-ae {background-position: -32px 0; -} -.country-flag.flag-af {background-position: -48px 0; -} -.country-flag.flag-ag {background-position: -64px 0; -} -.country-flag.flag-ai {background-position: -80px 0; -} -.country-flag.flag-al {background-position: -96px 0; -} -.country-flag.flag-am {background-position: -112px 0; -} -.country-flag.flag-an {background-position: -128px 0; -} -.country-flag.flag-ao {background-position: -144px 0; -} -.country-flag.flag-ar {background-position: -160px 0; -} -.country-flag.flag-as {background-position: -176px 0; -} -.country-flag.flag-at {background-position: -192px 0; -} -.country-flag.flag-au {background-position: -208px 0; -} -.country-flag.flag-aw {background-position: -224px 0; -} -.country-flag.flag-az {background-position: -240px 0; -} -.country-flag.flag-ba {background-position: 0 -11px; -} -.country-flag.flag-bb {background-position: -16px -11px; -} -.country-flag.flag-bd {background-position: -32px -11px; -} -.country-flag.flag-be {background-position: -48px -11px; -} -.country-flag.flag-bf {background-position: -64px -11px; -} -.country-flag.flag-bg {background-position: -80px -11px; -} -.country-flag.flag-bh {background-position: -96px -11px; -} -.country-flag.flag-bi {background-position: -112px -11px; -} -.country-flag.flag-bj {background-position: -128px -11px; -} -.country-flag.flag-bm {background-position: -144px -11px; -} -.country-flag.flag-bn {background-position: -160px -11px; -} -.country-flag.flag-bo {background-position: -176px -11px; -} -.country-flag.flag-br {background-position: -192px -11px; -} -.country-flag.flag-bs {background-position: -208px -11px; -} -.country-flag.flag-bt {background-position: -224px -11px; -} -.country-flag.flag-bv {background-position: -240px -11px; -} -.country-flag.flag-bw {background-position: 0 
-22px; -} -.country-flag.flag-by {background-position: -16px -22px; -} -.country-flag.flag-bz {background-position: -32px -22px; -} -.country-flag.flag-ca {background-position: -48px -22px; -} -.country-flag.flag-catalonia {background-position: -64px -22px; -} -.country-flag.flag-cd {background-position: -80px -22px; -} -.country-flag.flag-cf {background-position: -96px -22px; -} -.country-flag.flag-cg {background-position: -112px -22px; -} -.country-flag.flag-ch {background-position: -128px -22px; -} -.country-flag.flag-ci {background-position: -144px -22px; -} -.country-flag.flag-ck {background-position: -160px -22px; -} -.country-flag.flag-cl {background-position: -176px -22px; -} -.country-flag.flag-cm {background-position: -192px -22px; -} -.country-flag.flag-cn {background-position: -208px -22px; -} -.country-flag.flag-co {background-position: -224px -22px; -} -.country-flag.flag-cr {background-position: -240px -22px; -} -.country-flag.flag-cu {background-position: 0 -33px; -} -.country-flag.flag-cv {background-position: -16px -33px; -} -.country-flag.flag-cw {background-position: -32px -33px; -} -.country-flag.flag-cy {background-position: -48px -33px; -} -.country-flag.flag-cz {background-position: -64px -33px; -} -.country-flag.flag-de {background-position: -80px -33px; -} -.country-flag.flag-dj {background-position: -96px -33px; -} -.country-flag.flag-dk {background-position: -112px -33px; -} -.country-flag.flag-dm {background-position: -128px -33px; -} -.country-flag.flag-do {background-position: -144px -33px; -} -.country-flag.flag-dz {background-position: -160px -33px; -} -.country-flag.flag-ec {background-position: -176px -33px; -} -.country-flag.flag-ee {background-position: -192px -33px; -} -.country-flag.flag-eg {background-position: -208px -33px; -} -.country-flag.flag-eh {background-position: -224px -33px; -} -.country-flag.flag-england {background-position: -240px -33px; -} -.country-flag.flag-er {background-position: 0 -44px; -} -.country-flag.flag-es {background-position: -16px -44px; -} -.country-flag.flag-et {background-position: -32px -44px; -} -.country-flag.flag-eu {background-position: -48px -44px; -} -.country-flag.flag-fi {background-position: -64px -44px; -} -.country-flag.flag-fj {background-position: -80px -44px; -} -.country-flag.flag-fk {background-position: -96px -44px; -} -.country-flag.flag-fm {background-position: -112px -44px; -} -.country-flag.flag-fo {background-position: -128px -44px; -} -.country-flag.flag-fr {background-position: -144px -44px; -} -.country-flag.flag-ga {background-position: -160px -44px; -} -.country-flag.flag-gb {background-position: -176px -44px; -} -.country-flag.flag-gd {background-position: -192px -44px; -} -.country-flag.flag-ge {background-position: -208px -44px; -} -.country-flag.flag-gf {background-position: -224px -44px; -} -.country-flag.flag-gg {background-position: -240px -44px; -} -.country-flag.flag-gh {background-position: 0 -55px; -} -.country-flag.flag-gi {background-position: -16px -55px; -} -.country-flag.flag-gl {background-position: -32px -55px; -} -.country-flag.flag-gm {background-position: -48px -55px; -} -.country-flag.flag-gn {background-position: -64px -55px; -} -.country-flag.flag-gp {background-position: -80px -55px; -} -.country-flag.flag-gq {background-position: -96px -55px; -} -.country-flag.flag-gr {background-position: -112px -55px; -} -.country-flag.flag-gs {background-position: -128px -55px; -} -.country-flag.flag-gt {background-position: -144px -55px; -} -.country-flag.flag-gu 
{background-position: -160px -55px; -} -.country-flag.flag-gw {background-position: -176px -55px; -} -.country-flag.flag-gy {background-position: -192px -55px; -} -.country-flag.flag-hk {background-position: -208px -55px; -} -.country-flag.flag-hm {background-position: -224px -55px; -} -.country-flag.flag-hn {background-position: -240px -55px; -} -.country-flag.flag-hr {background-position: 0 -66px; -} -.country-flag.flag-ht {background-position: -16px -66px; -} -.country-flag.flag-hu {background-position: -32px -66px; -} -.country-flag.flag-ic {background-position: -48px -66px; -} -.country-flag.flag-id {background-position: -64px -66px; -} -.country-flag.flag-ie {background-position: -80px -66px; -} -.country-flag.flag-il {background-position: -96px -66px; -} -.country-flag.flag-im {background-position: -112px -66px; -} -.country-flag.flag-in {background-position: -128px -66px; -} -.country-flag.flag-io {background-position: -144px -66px; -} -.country-flag.flag-iq {background-position: -160px -66px; -} -.country-flag.flag-ir {background-position: -176px -66px; -} -.country-flag.flag-is {background-position: -192px -66px; -} -.country-flag.flag-it {background-position: -208px -66px; -} -.country-flag.flag-je {background-position: -224px -66px; -} -.country-flag.flag-jm {background-position: -240px -66px; -} -.country-flag.flag-jo {background-position: 0 -77px; -} -.country-flag.flag-jp {background-position: -16px -77px; -} -.country-flag.flag-ke {background-position: -32px -77px; -} -.country-flag.flag-kg {background-position: -48px -77px; -} -.country-flag.flag-kh {background-position: -64px -77px; -} -.country-flag.flag-ki {background-position: -80px -77px; -} -.country-flag.flag-km {background-position: -96px -77px; -} -.country-flag.flag-kn {background-position: -112px -77px; -} -.country-flag.flag-kp {background-position: -128px -77px; -} -.country-flag.flag-kr {background-position: -144px -77px; -} -.country-flag.flag-kurdistan {background-position: -160px -77px; -} -.country-flag.flag-kw {background-position: -176px -77px; -} -.country-flag.flag-ky {background-position: -192px -77px; -} -.country-flag.flag-kz {background-position: -208px -77px; -} -.country-flag.flag-la {background-position: -224px -77px; -} -.country-flag.flag-lb {background-position: -240px -77px; -} -.country-flag.flag-lc {background-position: 0 -88px; -} -.country-flag.flag-li {background-position: -16px -88px; -} -.country-flag.flag-lk {background-position: -32px -88px; -} -.country-flag.flag-lr {background-position: -48px -88px; -} -.country-flag.flag-ls {background-position: -64px -88px; -} -.country-flag.flag-lt {background-position: -80px -88px; -} -.country-flag.flag-lu {background-position: -96px -88px; -} -.country-flag.flag-lv {background-position: -112px -88px; -} -.country-flag.flag-ly {background-position: -128px -88px; -} -.country-flag.flag-ma {background-position: -144px -88px; -} -.country-flag.flag-mc {background-position: -160px -88px; -} -.country-flag.flag-md {background-position: -176px -88px; -} -.country-flag.flag-me {background-position: -192px -88px; -} -.country-flag.flag-mg {background-position: -208px -88px; -} -.country-flag.flag-mh {background-position: -224px -88px; -} -.country-flag.flag-mk {background-position: -240px -88px; -} -.country-flag.flag-ml {background-position: 0 -99px; -} -.country-flag.flag-mm {background-position: -16px -99px; -} -.country-flag.flag-mn {background-position: -32px -99px; -} -.country-flag.flag-mo {background-position: -48px -99px; -} 
-.country-flag.flag-mp {background-position: -64px -99px; -} -.country-flag.flag-mq {background-position: -80px -99px; -} -.country-flag.flag-mr {background-position: -96px -99px; -} -.country-flag.flag-ms {background-position: -112px -99px; -} -.country-flag.flag-mt {background-position: -128px -99px; -} -.country-flag.flag-mu {background-position: -144px -99px; -} -.country-flag.flag-mv {background-position: -160px -99px; -} -.country-flag.flag-mw {background-position: -176px -99px; -} -.country-flag.flag-mx {background-position: -192px -99px; -} -.country-flag.flag-my {background-position: -208px -99px; -} -.country-flag.flag-mz {background-position: -224px -99px; -} -.country-flag.flag-na {background-position: -240px -99px; -} -.country-flag.flag-nc {background-position: 0 -110px; -} -.country-flag.flag-ne {background-position: -16px -110px; -} -.country-flag.flag-nf {background-position: -32px -110px; -} -.country-flag.flag-ng {background-position: -48px -110px; -} -.country-flag.flag-ni {background-position: -64px -110px; -} -.country-flag.flag-nl {background-position: -80px -110px; -} -.country-flag.flag-no {background-position: -96px -110px; -} -.country-flag.flag-np {background-position: -112px -110px; -} -.country-flag.flag-nr {background-position: -128px -110px; -} -.country-flag.flag-nu {background-position: -144px -110px; -} -.country-flag.flag-nz {background-position: -160px -110px; -} -.country-flag.flag-om {background-position: -176px -110px; -} -.country-flag.flag-pa {background-position: -192px -110px; -} -.country-flag.flag-pe {background-position: -208px -110px; -} -.country-flag.flag-pf {background-position: -224px -110px; -} -.country-flag.flag-pg {background-position: -240px -110px; -} -.country-flag.flag-ph {background-position: 0 -121px; -} -.country-flag.flag-pk {background-position: -16px -121px; -} -.country-flag.flag-pl {background-position: -32px -121px; -} -.country-flag.flag-pm {background-position: -48px -121px; -} -.country-flag.flag-pn {background-position: -64px -121px; -} -.country-flag.flag-pr {background-position: -80px -121px; -} -.country-flag.flag-ps {background-position: -96px -121px; -} -.country-flag.flag-pt {background-position: -112px -121px; -} -.country-flag.flag-pw {background-position: -128px -121px; -} -.country-flag.flag-py {background-position: -144px -121px; -} -.country-flag.flag-qa {background-position: -160px -121px; -} -.country-flag.flag-re {background-position: -176px -121px; -} -.country-flag.flag-ro {background-position: -192px -121px; -} -.country-flag.flag-rs {background-position: -208px -121px; -} -.country-flag.flag-ru {background-position: -224px -121px; -} -.country-flag.flag-rw {background-position: -240px -121px; -} -.country-flag.flag-sa {background-position: 0 -132px; -} -.country-flag.flag-sb {background-position: -16px -132px; -} -.country-flag.flag-sc {background-position: -32px -132px; -} -.country-flag.flag-scotland {background-position: -48px -132px; -} -.country-flag.flag-sd {background-position: -64px -132px; -} -.country-flag.flag-se {background-position: -80px -132px; -} -.country-flag.flag-sg {background-position: -96px -132px; -} -.country-flag.flag-sh {background-position: -112px -132px; -} -.country-flag.flag-si {background-position: -128px -132px; -} -.country-flag.flag-sk {background-position: -144px -132px; -} -.country-flag.flag-sl {background-position: -160px -132px; -} -.country-flag.flag-sm {background-position: -176px -132px; -} -.country-flag.flag-sn {background-position: -192px -132px; -} 
-.country-flag.flag-so {background-position: -208px -132px; -} -.country-flag.flag-somaliland {background-position: -224px -132px; -} -.country-flag.flag-sr {background-position: -240px -132px; -} -.country-flag.flag-ss {background-position: 0 -143px; -} -.country-flag.flag-st {background-position: -16px -143px; -} -.country-flag.flag-sv {background-position: -32px -143px; -} -.country-flag.flag-sx {background-position: -48px -143px; -} -.country-flag.flag-sy {background-position: -64px -143px; -} -.country-flag.flag-sz {background-position: -80px -143px; -} -.country-flag.flag-tc {background-position: -96px -143px; -} -.country-flag.flag-td {background-position: -112px -143px; -} -.country-flag.flag-tf {background-position: -128px -143px; -} -.country-flag.flag-tg {background-position: -144px -143px; -} -.country-flag.flag-th {background-position: -160px -143px; -} -.country-flag.flag-tj {background-position: -176px -143px; -} -.country-flag.flag-tk {background-position: -192px -143px; -} -.country-flag.flag-tl {background-position: -208px -143px; -} -.country-flag.flag-tm {background-position: -224px -143px; -} -.country-flag.flag-tn {background-position: -240px -143px; -} -.country-flag.flag-to {background-position: 0 -154px; -} -.country-flag.flag-tr {background-position: -16px -154px; -} -.country-flag.flag-tt {background-position: -32px -154px; -} -.country-flag.flag-tv {background-position: -48px -154px; -} -.country-flag.flag-tw {background-position: -64px -154px; -} -.country-flag.flag-tz {background-position: -80px -154px; -} -.country-flag.flag-ua {background-position: -96px -154px; -} -.country-flag.flag-ug {background-position: -112px -154px; -} -.country-flag.flag-um {background-position: -128px -154px; -} -.country-flag.flag-us {background-position: -144px -154px; -} -.country-flag.flag-uy {background-position: -160px -154px; -} -.country-flag.flag-uz {background-position: -176px -154px; -} -.country-flag.flag-va {background-position: -192px -154px; -} -.country-flag.flag-vc {background-position: -208px -154px; -} -.country-flag.flag-ve {background-position: -224px -154px; -} -.country-flag.flag-vg {background-position: -240px -154px; -} -.country-flag.flag-vi {background-position: 0 -165px; -} -.country-flag.flag-vn {background-position: -16px -165px; -} -.country-flag.flag-vu {background-position: -32px -165px; -} -.country-flag.flag-wales {background-position: -48px -165px; -} -.country-flag.flag-wf {background-position: -64px -165px; -} -.country-flag.flag-ws {background-position: -80px -165px; -} -.country-flag.flag-ye {background-position: -96px -165px; -} -.country-flag.flag-yt {background-position: -112px -165px; -} -.country-flag.flag-za {background-position: -128px -165px; -} -.country-flag.flag-zanzibar {background-position: -144px -165px; -} -.country-flag.flag-zm {background-position: -160px -165px; -} -.country-flag.flag-zw {background-position: -176px -165px; -} + width: 16px; + height: 11px; + background: url(../images/country-flags.png) no-repeat; +} +.country-flag.flag-ad { background-position: -16px 0; } +.country-flag.flag-ae { background-position: -32px 0; } +.country-flag.flag-af { background-position: -48px 0; } +.country-flag.flag-ag { background-position: -64px 0; } +.country-flag.flag-ai { background-position: -80px 0; } +.country-flag.flag-al { background-position: -96px 0; } +.country-flag.flag-am { background-position: -112px 0; } +.country-flag.flag-an { background-position: -128px 0; } +.country-flag.flag-ao { background-position: -144px 0; } 
+.country-flag.flag-ar { background-position: -160px 0; } +.country-flag.flag-as { background-position: -176px 0; } +.country-flag.flag-at { background-position: -192px 0; } +.country-flag.flag-au { background-position: -208px 0; } +.country-flag.flag-aw { background-position: -224px 0; } +.country-flag.flag-az { background-position: -240px 0; } +.country-flag.flag-ba { background-position: 0 -11px; } +.country-flag.flag-bb { background-position: -16px -11px; } +.country-flag.flag-bd { background-position: -32px -11px; } +.country-flag.flag-be { background-position: -48px -11px; } +.country-flag.flag-bf { background-position: -64px -11px; } +.country-flag.flag-bg { background-position: -80px -11px; } +.country-flag.flag-bh { background-position: -96px -11px; } +.country-flag.flag-bi { background-position: -112px -11px; } +.country-flag.flag-bj { background-position: -128px -11px; } +.country-flag.flag-bm { background-position: -144px -11px; } +.country-flag.flag-bn { background-position: -160px -11px; } +.country-flag.flag-bo { background-position: -176px -11px; } +.country-flag.flag-br { background-position: -192px -11px; } +.country-flag.flag-bs { background-position: -208px -11px; } +.country-flag.flag-bt { background-position: -224px -11px; } +.country-flag.flag-bv { background-position: -240px -11px; } +.country-flag.flag-bw { background-position: 0 -22px; } +.country-flag.flag-by { background-position: -16px -22px; } +.country-flag.flag-bz { background-position: -32px -22px; } +.country-flag.flag-ca { background-position: -48px -22px; } +.country-flag.flag-catalonia { background-position: -64px -22px; } +.country-flag.flag-cd { background-position: -80px -22px; } +.country-flag.flag-cf { background-position: -96px -22px; } +.country-flag.flag-cg { background-position: -112px -22px; } +.country-flag.flag-ch { background-position: -128px -22px; } +.country-flag.flag-ci { background-position: -144px -22px; } +.country-flag.flag-ck { background-position: -160px -22px; } +.country-flag.flag-cl { background-position: -176px -22px; } +.country-flag.flag-cm { background-position: -192px -22px; } +.country-flag.flag-cn { background-position: -208px -22px; } +.country-flag.flag-co { background-position: -224px -22px; } +.country-flag.flag-cr { background-position: -240px -22px; } +.country-flag.flag-cu { background-position: 0 -33px; } +.country-flag.flag-cv { background-position: -16px -33px; } +.country-flag.flag-cw { background-position: -32px -33px; } +.country-flag.flag-cy { background-position: -48px -33px; } +.country-flag.flag-cz { background-position: -64px -33px; } +.country-flag.flag-de { background-position: -80px -33px; } +.country-flag.flag-dj { background-position: -96px -33px; } +.country-flag.flag-dk { background-position: -112px -33px; } +.country-flag.flag-dm { background-position: -128px -33px; } +.country-flag.flag-do { background-position: -144px -33px; } +.country-flag.flag-dz { background-position: -160px -33px; } +.country-flag.flag-ec { background-position: -176px -33px; } +.country-flag.flag-ee { background-position: -192px -33px; } +.country-flag.flag-eg { background-position: -208px -33px; } +.country-flag.flag-eh { background-position: -224px -33px; } +.country-flag.flag-england { background-position: -240px -33px; } +.country-flag.flag-er { background-position: 0 -44px; } +.country-flag.flag-es { background-position: -16px -44px; } +.country-flag.flag-et { background-position: -32px -44px; } +.country-flag.flag-eu { background-position: -48px -44px; } 
+.country-flag.flag-fi { background-position: -64px -44px; } +.country-flag.flag-fj { background-position: -80px -44px; } +.country-flag.flag-fk { background-position: -96px -44px; } +.country-flag.flag-fm { background-position: -112px -44px; } +.country-flag.flag-fo { background-position: -128px -44px; } +.country-flag.flag-fr { background-position: -144px -44px; } +.country-flag.flag-ga { background-position: -160px -44px; } +.country-flag.flag-gb { background-position: -176px -44px; } +.country-flag.flag-gd { background-position: -192px -44px; } +.country-flag.flag-ge { background-position: -208px -44px; } +.country-flag.flag-gf { background-position: -224px -44px; } +.country-flag.flag-gg { background-position: -240px -44px; } +.country-flag.flag-gh { background-position: 0 -55px; } +.country-flag.flag-gi { background-position: -16px -55px; } +.country-flag.flag-gl { background-position: -32px -55px; } +.country-flag.flag-gm { background-position: -48px -55px; } +.country-flag.flag-gn { background-position: -64px -55px; } +.country-flag.flag-gp { background-position: -80px -55px; } +.country-flag.flag-gq { background-position: -96px -55px; } +.country-flag.flag-gr { background-position: -112px -55px; } +.country-flag.flag-gs { background-position: -128px -55px; } +.country-flag.flag-gt { background-position: -144px -55px; } +.country-flag.flag-gu { background-position: -160px -55px; } +.country-flag.flag-gw { background-position: -176px -55px; } +.country-flag.flag-gy { background-position: -192px -55px; } +.country-flag.flag-hk { background-position: -208px -55px; } +.country-flag.flag-hm { background-position: -224px -55px; } +.country-flag.flag-hn { background-position: -240px -55px; } +.country-flag.flag-hr { background-position: 0 -66px; } +.country-flag.flag-ht { background-position: -16px -66px; } +.country-flag.flag-hu { background-position: -32px -66px; } +.country-flag.flag-ic { background-position: -48px -66px; } +.country-flag.flag-id { background-position: -64px -66px; } +.country-flag.flag-ie { background-position: -80px -66px; } +.country-flag.flag-il { background-position: -96px -66px; } +.country-flag.flag-im { background-position: -112px -66px; } +.country-flag.flag-in { background-position: -128px -66px; } +.country-flag.flag-io { background-position: -144px -66px; } +.country-flag.flag-iq { background-position: -160px -66px; } +.country-flag.flag-ir { background-position: -176px -66px; } +.country-flag.flag-is { background-position: -192px -66px; } +.country-flag.flag-it { background-position: -208px -66px; } +.country-flag.flag-je { background-position: -224px -66px; } +.country-flag.flag-jm { background-position: -240px -66px; } +.country-flag.flag-jo { background-position: 0 -77px; } +.country-flag.flag-jp { background-position: -16px -77px; } +.country-flag.flag-ke { background-position: -32px -77px; } +.country-flag.flag-kg { background-position: -48px -77px; } +.country-flag.flag-kh { background-position: -64px -77px; } +.country-flag.flag-ki { background-position: -80px -77px; } +.country-flag.flag-km { background-position: -96px -77px; } +.country-flag.flag-kn { background-position: -112px -77px; } +.country-flag.flag-kp { background-position: -128px -77px; } +.country-flag.flag-kr { background-position: -144px -77px; } +.country-flag.flag-kurdistan { background-position: -160px -77px; } +.country-flag.flag-kw { background-position: -176px -77px; } +.country-flag.flag-ky { background-position: -192px -77px; } +.country-flag.flag-kz { background-position: 
-208px -77px; } +.country-flag.flag-la { background-position: -224px -77px; } +.country-flag.flag-lb { background-position: -240px -77px; } +.country-flag.flag-lc { background-position: 0 -88px; } +.country-flag.flag-li { background-position: -16px -88px; } +.country-flag.flag-lk { background-position: -32px -88px; } +.country-flag.flag-lr { background-position: -48px -88px; } +.country-flag.flag-ls { background-position: -64px -88px; } +.country-flag.flag-lt { background-position: -80px -88px; } +.country-flag.flag-lu { background-position: -96px -88px; } +.country-flag.flag-lv { background-position: -112px -88px; } +.country-flag.flag-ly { background-position: -128px -88px; } +.country-flag.flag-ma { background-position: -144px -88px; } +.country-flag.flag-mc { background-position: -160px -88px; } +.country-flag.flag-md { background-position: -176px -88px; } +.country-flag.flag-me { background-position: -192px -88px; } +.country-flag.flag-mg { background-position: -208px -88px; } +.country-flag.flag-mh { background-position: -224px -88px; } +.country-flag.flag-mk { background-position: -240px -88px; } +.country-flag.flag-ml { background-position: 0 -99px; } +.country-flag.flag-mm { background-position: -16px -99px; } +.country-flag.flag-mn { background-position: -32px -99px; } +.country-flag.flag-mo { background-position: -48px -99px; } +.country-flag.flag-mp { background-position: -64px -99px; } +.country-flag.flag-mq { background-position: -80px -99px; } +.country-flag.flag-mr { background-position: -96px -99px; } +.country-flag.flag-ms { background-position: -112px -99px; } +.country-flag.flag-mt { background-position: -128px -99px; } +.country-flag.flag-mu { background-position: -144px -99px; } +.country-flag.flag-mv { background-position: -160px -99px; } +.country-flag.flag-mw { background-position: -176px -99px; } +.country-flag.flag-mx { background-position: -192px -99px; } +.country-flag.flag-my { background-position: -208px -99px; } +.country-flag.flag-mz { background-position: -224px -99px; } +.country-flag.flag-na { background-position: -240px -99px; } +.country-flag.flag-nc { background-position: 0 -110px; } +.country-flag.flag-ne { background-position: -16px -110px; } +.country-flag.flag-nf { background-position: -32px -110px; } +.country-flag.flag-ng { background-position: -48px -110px; } +.country-flag.flag-ni { background-position: -64px -110px; } +.country-flag.flag-nl { background-position: -80px -110px; } +.country-flag.flag-no { background-position: -96px -110px; } +.country-flag.flag-np { background-position: -112px -110px; } +.country-flag.flag-nr { background-position: -128px -110px; } +.country-flag.flag-nu { background-position: -144px -110px; } +.country-flag.flag-nz { background-position: -160px -110px; } +.country-flag.flag-om { background-position: -176px -110px; } +.country-flag.flag-pa { background-position: -192px -110px; } +.country-flag.flag-pe { background-position: -208px -110px; } +.country-flag.flag-pf { background-position: -224px -110px; } +.country-flag.flag-pg { background-position: -240px -110px; } +.country-flag.flag-ph { background-position: 0 -121px; } +.country-flag.flag-pk { background-position: -16px -121px; } +.country-flag.flag-pl { background-position: -32px -121px; } +.country-flag.flag-pm { background-position: -48px -121px; } +.country-flag.flag-pn { background-position: -64px -121px; } +.country-flag.flag-pr { background-position: -80px -121px; } +.country-flag.flag-ps { background-position: -96px -121px; } +.country-flag.flag-pt { 
background-position: -112px -121px; } +.country-flag.flag-pw { background-position: -128px -121px; } +.country-flag.flag-py { background-position: -144px -121px; } +.country-flag.flag-qa { background-position: -160px -121px; } +.country-flag.flag-re { background-position: -176px -121px; } +.country-flag.flag-ro { background-position: -192px -121px; } +.country-flag.flag-rs { background-position: -208px -121px; } +.country-flag.flag-ru { background-position: -224px -121px; } +.country-flag.flag-rw { background-position: -240px -121px; } +.country-flag.flag-sa { background-position: 0 -132px; } +.country-flag.flag-sb { background-position: -16px -132px; } +.country-flag.flag-sc { background-position: -32px -132px; } +.country-flag.flag-scotland { background-position: -48px -132px; } +.country-flag.flag-sd { background-position: -64px -132px; } +.country-flag.flag-se { background-position: -80px -132px; } +.country-flag.flag-sg { background-position: -96px -132px; } +.country-flag.flag-sh { background-position: -112px -132px; } +.country-flag.flag-si { background-position: -128px -132px; } +.country-flag.flag-sk { background-position: -144px -132px; } +.country-flag.flag-sl { background-position: -160px -132px; } +.country-flag.flag-sm { background-position: -176px -132px; } +.country-flag.flag-sn { background-position: -192px -132px; } +.country-flag.flag-so { background-position: -208px -132px; } +.country-flag.flag-somaliland { background-position: -224px -132px; } +.country-flag.flag-sr { background-position: -240px -132px; } +.country-flag.flag-ss { background-position: 0 -143px; } +.country-flag.flag-st { background-position: -16px -143px; } +.country-flag.flag-sv { background-position: -32px -143px; } +.country-flag.flag-sx { background-position: -48px -143px; } +.country-flag.flag-sy { background-position: -64px -143px; } +.country-flag.flag-sz { background-position: -80px -143px; } +.country-flag.flag-tc { background-position: -96px -143px; } +.country-flag.flag-td { background-position: -112px -143px; } +.country-flag.flag-tf { background-position: -128px -143px; } +.country-flag.flag-tg { background-position: -144px -143px; } +.country-flag.flag-th { background-position: -160px -143px; } +.country-flag.flag-tj { background-position: -176px -143px; } +.country-flag.flag-tk { background-position: -192px -143px; } +.country-flag.flag-tl { background-position: -208px -143px; } +.country-flag.flag-tm { background-position: -224px -143px; } +.country-flag.flag-tn { background-position: -240px -143px; } +.country-flag.flag-to { background-position: 0 -154px; } +.country-flag.flag-tr { background-position: -16px -154px; } +.country-flag.flag-tt { background-position: -32px -154px; } +.country-flag.flag-tv { background-position: -48px -154px; } +.country-flag.flag-tw { background-position: -64px -154px; } +.country-flag.flag-tz { background-position: -80px -154px; } +.country-flag.flag-ua { background-position: -96px -154px; } +.country-flag.flag-ug { background-position: -112px -154px; } +.country-flag.flag-um { background-position: -128px -154px; } +.country-flag.flag-us { background-position: -144px -154px; } +.country-flag.flag-uy { background-position: -160px -154px; } +.country-flag.flag-uz { background-position: -176px -154px; } +.country-flag.flag-va { background-position: -192px -154px; } +.country-flag.flag-vc { background-position: -208px -154px; } +.country-flag.flag-ve { background-position: -224px -154px; } +.country-flag.flag-vg { background-position: -240px -154px; } 
+.country-flag.flag-vi { background-position: 0 -165px; }
+.country-flag.flag-vn { background-position: -16px -165px; }
+.country-flag.flag-vu { background-position: -32px -165px; }
+.country-flag.flag-wales { background-position: -48px -165px; }
+.country-flag.flag-wf { background-position: -64px -165px; }
+.country-flag.flag-ws { background-position: -80px -165px; }
+.country-flag.flag-ye { background-position: -96px -165px; }
+.country-flag.flag-yt { background-position: -112px -165px; }
+.country-flag.flag-za { background-position: -128px -165px; }
+.country-flag.flag-zanzibar { background-position: -144px -165px; }
+.country-flag.flag-zm { background-position: -160px -165px; }
+.country-flag.flag-zw { background-position: -176px -165px; }
diff --git a/static/css/dark.css b/static/css/dark.css
index dcd93486ff..c160f68cb2 100644
--- a/static/css/dark.css
+++ b/static/css/dark.css
@@ -204,14 +204,31 @@ td.tvShow a:hover {
     background-color: rgb(51, 51, 51);
 }
 
+.popover-title {
+    background-color: rgb(35, 35, 35);
+    border-bottom-color: #111;
+}
+
 .popover-content {
     background-color: rgb(51, 51, 51);
 }
 
+.popover.top .arrow::after {
+    border-top-color: rgb(51, 51, 51);
+}
+
 .popover.bottom .arrow::after {
     border-bottom-color: rgb(51, 51, 51);
 }
 
+.popover.right .arrow::after {
+    border-right-color: rgb(51, 51, 51);
+}
+
+.popover.left .arrow::after {
+    border-left-color: rgb(51, 51, 51);
+}
+
 /* =======================================================================
 home_addShows.mako
 ========================================================================== */
@@ -545,9 +562,11 @@ body {
 /* navbar styling */
 .navbar-default {
     background-color: rgb(35, 35, 35);
-    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333', endColorstr='#232323');
+    filter: progid:dximagetransform.microsoft.gradient(startColorstr='#333333', endColorstr='#232323');
+    /* stylelint-disable declaration-block-no-shorthand-property-overrides */
     background: -webkit-gradient(linear, left top, left bottom, from(rgb(51, 51, 51)), to(rgb(35, 35, 35)));
     background: -moz-linear-gradient(top, rgb(51, 51, 51), rgb(35, 35, 35));
+    /* stylelint-enable */
     border-color: rgb(62, 63, 58);
 }
 
@@ -619,9 +638,11 @@ body {
 /* submenu styling */
 #sub-menu-container {
     background-color: rgb(41, 41, 41);
-    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#414141', endColorstr='#292929');
+    filter: progid:dximagetransform.microsoft.gradient(startColorstr='#414141', endColorstr='#292929');
+    /* stylelint-disable declaration-block-no-shorthand-property-overrides */
     background: -webkit-gradient(linear, left top, left bottom, from(rgb(65, 65, 65)), to(rgb(41, 41, 41)));
     background: -moz-linear-gradient(top, rgb(65, 65, 65), rgb(41, 41, 41));
+    /* stylelint-enable */
     border-color: rgb(62, 63, 58);
 }
 
@@ -629,12 +650,11 @@ body {
     color: rgb(255, 255, 255);
     text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75);
     background-color: rgb(97, 97, 97);
-    *background-color: rgb(97, 97, 97);
     background-image: -ms-linear-gradient(top, rgb(51, 51, 51), rgb(85, 85, 85));
     background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(51, 51, 51)), to(rgb(85, 85, 85)));
     background-image: -webkit-linear-gradient(top, rgb(85, 85, 85), rgb(51, 51, 51));
     background-image: -o-linear-gradient(top, rgb(85, 85, 85), rgb(51, 51, 51));
-    background-image: linear-gradient(top, rgb(85, 85, 85), rgb(51, 51, 51));
+    background-image: linear-gradient(to top, rgb(85, 85, 85), rgb(51, 51, 51));
     background-image: -moz-linear-gradient(top, rgb(85, 85, 85),
rgb(51, 51, 51)); background-repeat: repeat-x; border: 1px solid rgb(17, 17, 17); @@ -644,10 +664,7 @@ body { border-bottom-color: rgb(17, 17, 17); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#297AB8', endColorstr='#15528F', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); - /**zoom: 1;*/ - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn:hover, @@ -656,7 +673,6 @@ body { .btn.disabled, .btn[disabled] { background-color: rgb(30, 30, 30); - *background-color: rgb(30, 30, 30); color: rgb(255, 255, 255); } @@ -669,14 +685,13 @@ body { .btn:hover { text-decoration: none; background-color: rgb(30, 30, 30); - *background-color: rgb(30, 30, 30); color: rgb(255, 255, 255); background-position: 0 -150px; - -webkit-transition: background-position 0.0s linear; - -moz-transition: background-position 0.0s linear; - -ms-transition: background-position 0.0s linear; - -o-transition: background-position 0.0s linear; - transition: background-position 0.0s linear; + -webkit-transition: background-position 0s linear; + -moz-transition: background-position 0s linear; + -ms-transition: background-position 0s linear; + -o-transition: background-position 0s linear; + transition: background-position 0s linear; } .btn:focus { @@ -689,12 +704,9 @@ body { .btn.active, .btn:active { background-color: rgb(30, 30, 30); - *background-color: rgb(30, 30, 30); background-image: none; color: rgb(255, 255, 255); outline: 0; - -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); } @@ -706,12 +718,10 @@ body { background-image: none; opacity: 0.65; filter: alpha(opacity=65); - -webkit-box-shadow: none; - -moz-box-shadow: none; box-shadow: none; } -@media (min-width:768px) { +@media (min-width: 768px) { .navbar .nav > li > .dropdown-menu::after { position: absolute; top: -6px; @@ -770,10 +780,9 @@ div.formpaginate .next { color: rgb(255, 255, 255); cursor: hand; cursor: pointer; - background: rgb(95,95,95); + background: rgb(95, 95, 95); -webkit-border-radius: 6px; - -moz-border-radius: 6px; - border-radius: 6px; + border-radius: 6px; } /* ======================================================================= @@ -786,11 +795,8 @@ pnotify.css background-image: linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; background-image: -webkit-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; background-image: -o-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; - filter: progid:dximagetransform.microsoft.gradient(startColorstr=rgb(51, 51, 51), endColorstr=rgb(61, 61, 61)) !important; - -ms-filter: progid:dximagetransform.microsoft.gradient(startColorstr=rgb(51, 51, 51), endColorstr=rgb(61, 61, 61)) !important; - -moz-box-shadow: 0 0 2px rgb(0, 0, 0); - -webkit-box-shadow: 0 0 2px rgb(0, 0, 0); - -o-box-shadow: 0 0 2px rgb(0, 0, 0); + filter: progid:dximagetransform.microsoft.gradient(startcolorstr=rgb(51, 51, 51), endcolorstr=rgb(61, 61, 61)) !important; + -ms-filter: progid:dximagetransform.microsoft.gradient(startcolorstr=rgb(51, 51, 51), 
endcolorstr=rgb(61, 61, 61)) !important; box-shadow: 0 0 2px rgb(0, 0, 0); } @@ -827,7 +833,7 @@ tablesorter.css .tablesorter th { color: rgb(255, 255, 255); text-align: center; - text-shadow: -1px -1px 0 rgba(0,0,0,0.3); + text-shadow: -1px -1px 0 rgba(0, 0, 0, 0.3); background-color: rgb(85, 85, 85); border-collapse: collapse; font-weight: normal; @@ -836,13 +842,11 @@ tablesorter.css .tablesorter thead .tablesorter-headerDesc { background-color: rgb(85, 85, 85); background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7); - /* background-image: url(../images/tablesorter/asc.gif); */ } .tablesorter thead .tablesorter-headerAsc { background-color: rgb(85, 85, 85); background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7); - /* background-image: url(../images/tablesorter/desc.gif); */ } thead.tablesorter-stickyHeader { @@ -870,11 +874,8 @@ thead.tablesorter-stickyHeader { /* hidden filter row */ .tablesorter-filter-row.hideme td { - /*** *********************************************** ***/ - /*** change this padding to modify the thickness ***/ - /*** of the closed filter row (height = padding x 2) ***/ + /* Change the padding to modify the thickness of the closed filter row (height = padding x 2) */ padding: 2px; - /*** *********************************************** ***/ margin: 0; line-height: 0; cursor: pointer; @@ -886,8 +887,7 @@ thead.tablesorter-stickyHeader { border: 0; padding: 0; margin: 0; - /* don't use visibility: hidden because it disables tabbing */ - opacity: 0; + opacity: 0; /* Don't use visibility: hidden because it disables tabbing */ filter: alpha(opacity=0); } @@ -957,8 +957,8 @@ bootstrap modal -webkit-border-top-right-radius: 5px; -moz-border-radius-topleft: 5px; -moz-border-radius-topright: 5px; - border-top-left-radius: 5px; - border-top-right-radius: 5px; + border-top-left-radius: 5px; + border-top-right-radius: 5px; } /* ======================================================================= @@ -998,7 +998,7 @@ new #confirmBox } .modal-body, -.modal-content{ +.modal-content { background: rgb(34, 34, 34); } @@ -1007,8 +1007,8 @@ new #confirmBox } .confirmation-modal { - background: -moz-linear-gradient(rgba(0,0,0,0.5), rgba(0,0,0,0.5)) repeat-x rgba(0,0,0,0.5); - background:-webkit-gradient(linear, 0% 0%, 0% 100%, from(rgba(0,0,0,0.5)), to(rgba(0,0,0,0.5))) repeat-x rgba(0,0,0,0.5); + background: -moz-linear-gradient(rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 0.5)) repeat-x rgba(0, 0, 0, 0.5); + background: -webkit-gradient(linear, 0% 0%, 0% 100%, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0.5))) repeat-x rgba(0, 0, 0, 0.5); } .modal-footer { @@ -1021,7 +1021,7 @@ new #confirmBox text-decoration: none; display: inline-block; color: rgb(255, 255, 255); - text-align:center; + text-align: center; text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); background-clip: padding-box; border: 1px solid rgb(17, 17, 17); @@ -1030,12 +1030,11 @@ new #confirmBox -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; - background-image: -webkit-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - background-image: -moz-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - background-image: -o-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - background-image: linear-gradient(to bottom, 
rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - -webkit-box-shadow: inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); - box-shadow: inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); + background-image: -webkit-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + background-image: -moz-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + background-image: -o-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + background-image: linear-gradient(to bottom, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + box-shadow: inset 0 1px rgba(255, 255, 255, 0.1), inset 0 -1px 3px rgba(0, 0, 0, 0.3), inset 0 0 0 1px rgba(255, 255, 255, 0.08), 0 1px 2px rgba(0, 0, 0, 0.15); } .modal-footer button:last-child { @@ -1058,8 +1057,7 @@ new #confirmBox background-color: rgb(161, 51, 49); } -/* Christmas edition*/ -/* navbar styling */ +/* Christmas edition */ .navbar-default { - background-color: rgb(51, 51, 51)!important; + background-color: rgb(51, 51, 51) !important; } diff --git a/static/css/light.css b/static/css/light.css index 1a1edd27d8..a716c8f40c 100644 --- a/static/css/light.css +++ b/static/css/light.css @@ -7,8 +7,6 @@ home.mako background-image: linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; background-image: -webkit-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; background-image: -o-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; - -moz-border-radius: 3px; - -webkit-border-radius: 3px; border-radius: 3px; } @@ -17,8 +15,6 @@ home.mako background-image: linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; background-image: -webkit-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; background-image: -o-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; - -moz-border-radius: 3px; - -webkit-border-radius: 3px; border-radius: 3px; } @@ -27,8 +23,6 @@ home.mako background-image: linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; background-image: -webkit-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; background-image: -o-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - -moz-border-radius: 3px; - -webkit-border-radius: 3px; border-radius: 3px; } @@ -37,8 +31,6 @@ home.mako background-image: linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; background-image: -webkit-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; background-image: -o-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - -moz-border-radius: 3px; - -webkit-border-radius: 3px; border-radius: 3px; } @@ -47,8 +39,6 @@ home.mako background-image: linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; background-image: -webkit-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; background-image: -o-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - -moz-border-radius: 3px; - -webkit-border-radius: 3px; border-radius: 3px; } @@ -111,9 +101,11 @@ bootstrap Overrides /* navbar styling */ .navbar-default { background-color: rgb(51, 51, 51); - filter: 
progid:DXImageTransform.Microsoft.gradient(startColorstr='#555555', endColorstr='#333333'); + filter: progid:dximagetransform.microsoft.gradient(startColorstr='#555555', endColorstr='#333333'); + /* stylelint-disable declaration-block-no-shorthand-property-overrides */ background: -webkit-gradient(linear, left top, left bottom, from(rgb(85, 85, 85)), to(rgb(51, 51, 51))); background: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(51, 51, 51)); + /* stylelint-enable */ border-color: rgb(62, 63, 58); } @@ -130,13 +122,14 @@ bootstrap Overrides /* ssubmenu styling */ #sub-menu-container { background-color: rgb(85, 85, 85); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#555555', endColorstr='#555555'); + filter: progid:dximagetransform.microsoft.gradient(startColorstr='#555555', endColorstr='#555555'); + /* stylelint-disable declaration-block-no-shorthand-property-overrides */ background: -webkit-gradient(linear, left top, left bottom, from(rgb(85, 85, 85)), to(rgb(85, 85, 85))); background: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(85, 85, 85)); + /* stylelint-enable */ border-color: rgb(62, 63, 58); } - .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { color: rgb(255, 255, 255); @@ -166,11 +159,8 @@ tablesorter.css /* hidden filter row */ .tablesorter-filter-row.hideme td { - /*** *********************************************** ***/ - /*** change this padding to modify the thickness ***/ - /*** of the closed filter row (height = padding x 2) ***/ + /* Change the padding to modify the thickness of the closed filter row (height = padding x 2) */ padding: 2px; - /*** *********************************************** ***/ margin: 0; line-height: 0; cursor: pointer; @@ -182,8 +172,7 @@ tablesorter.css border: 0; padding: 0; margin: 0; - /* don't use visibility: hidden because it disables tabbing */ - opacity: 0; + opacity: 0; /* Don't use visibility: hidden because it disables tabbing */ filter: alpha(opacity=0); } @@ -207,10 +196,6 @@ bootstrap modal padding: 9px 15px; border-bottom: 1px solid rgb(238, 238, 238); background-color: rgb(245, 241, 228); - -webkit-border-top-left-radius: 5px; - -webkit-border-top-right-radius: 5px; - -moz-border-radius-topleft: 5px; - -moz-border-radius-topright: 5px; - border-top-left-radius: 5px; - border-top-right-radius: 5px; - } + border-top-left-radius: 5px; + border-top-right-radius: 5px; +} diff --git a/static/css/style.css b/static/css/style.css index bc7f06791c..02fe620268 100644 --- a/static/css/style.css +++ b/static/css/style.css @@ -162,8 +162,6 @@ inc_top.mako padding-left: 0; background: transparent; border-width: 0; - -moz-border-radius: 0; - -webkit-border-radius: 0; border-radius: 0; } @@ -201,7 +199,6 @@ inc_top.mako } .upgrade-notification { - /*width: 600px;*/ text-align: center; margin-top: 20px; display: inline-block; @@ -431,11 +428,10 @@ inc_rootDirs.mako history.mako ========================================================================== */ .layout-controls { - padding-top:24px; + padding-top: 24px; } -.fanartOpacity -{ +.fanartOpacity { opacity: 0.9; } @@ -536,7 +532,7 @@ div.ellipsis { white-space: nowrap; overflow: hidden; text-overflow: ellipsis; - width: 85% + width: 85%; } div.xem { @@ -578,11 +574,11 @@ div.xem { } .aligner { - display: flex; - align-items: center; + display: flex; + align-items: center; } -/*FF hack*/ +/* FF hack */ @-moz-document url-prefix() { .aligner { flex-flow: column-reverse; @@ -590,10 +586,10 @@ div.xem { } } -/*IE 11 hack*/ -@media all and (-ms-high-contrast:none) 
{ +/* IE 11 hack */ +@media all and (-ms-high-contrast: none) { .poster-overlay { - left: 0px; + left: 0; top: 0; width: 100%; } @@ -609,7 +605,6 @@ div.xem { max-width: 100%; overflow: hidden; border: 1px solid rgb(136, 136, 136); - } /* Used by the displayShow and snatchSelection show header posters */ @@ -1075,13 +1070,14 @@ displayShow.mako ========================================================================== */ .modal-wide .modal-dialog { - width: 90%; + width: 90%; } -.modal-dialog{ - overflow-y: initial !important +.modal-dialog { + overflow-y: initial !important; } -.modal-body{ + +.modal-body { overflow-y: auto; } @@ -1104,15 +1100,15 @@ div#container-display-show { } #showSelector { - height:31px; + height: 31px; display: table-cell; - left:20px; + left: 20px; margin-top: 5px; margin-bottom: 5px; } .show-info-container { - overflow:hidden; + overflow: hidden; display: table-cell; } @@ -1123,7 +1119,7 @@ div#container-display-show { .container-navShow { margin-top: -2px; margin-left: 305px; - z-index:10; + z-index: 10; } .tvshowImg { @@ -1136,8 +1132,8 @@ div#container-display-show { @media (min-width: 1281px) { #sub-menu-container { - position:fixed; - width:100%; + position: fixed; + width: 100%; top: 51px; } } @@ -1185,7 +1181,7 @@ div#container-display-show { @media (max-width: 767px) { #showSelector { - left:5px; + left: 5px; margin-bottom: 5px; } @@ -1219,7 +1215,7 @@ div#container-display-show { .show-poster-container img { display: block; margin: 0 auto; - max-width: 280px!important; + max-width: 280px !important; } .tvshowImg { @@ -1235,11 +1231,11 @@ div#container-display-show { padding-top: 5px; width: 100%; } - } @media (min-width: 768px) { - .display-specials, .display-seasons { + .display-specials, + .display-seasons { top: -60px; } @@ -1252,7 +1248,6 @@ div#container-display-show { .tvshowImg { max-height: 245px; } - } @media (min-width: 992px) { @@ -1309,8 +1304,8 @@ div#container-display-show { } .navShow { - width:18px; - max-height:15px; + width: 18px; + max-height: 15px; display: -webkit-box; cursor: pointer; } @@ -1403,7 +1398,6 @@ ul.tags li a { #summary { margin-top: 5px; padding: 10px; - /*border: 1px solid #DFDEDE;*/ width: 100%; overflow: auto; cursor: default; @@ -1411,25 +1405,25 @@ ul.tags li a { #summaryBackground { position: absolute; - top:305px; + top: 305px; width: 100%; - height:200px; - background-color: #EFEFEF; + height: 200px; + background-color: #efefef; opacity: 0.8; border-width: 1px; - border-style: solid none solid none; + border-style: solid none; border-color: rgb(136, 136, 136); } #checkboxControlsBackground { position: absolute; - top: 0px; + top: 0; width: 100%; - height: 0px; - background-color: #DDD; + height: 0; + background-color: #ddd; opacity: 0.8; border-width: 1px; - border-style: solid none solid none; + border-style: solid none; border-color: rgb(136, 136, 136); z-index: -2; } @@ -1482,11 +1476,11 @@ ul.tags li a { } .downloaded { -background-color: rgb(195, 227, 200); + background-color: rgb(195, 227, 200); } .failed { -background-color: rgb(255, 153, 153); + background-color: rgb(255, 153, 153); } span.unaired { @@ -1777,12 +1771,11 @@ div.season-scene-exception { } select#select-show { - padding-top:5px; + padding-top: 5px; display: inline-block; width: 100%; } - /* ======================================================================= schedule.mako ========================================================================== */ @@ -1792,7 +1785,7 @@ schedule.mako } .listing-key { - padding: 2px 5px 2px 5px; + 
padding: 2px 5px; font-size: 13px; font-weight: bold; border-radius: 5px; @@ -1854,7 +1847,6 @@ h2.network { margin: auto; padding: 0; text-align: left; - /*width: 750px;*/ border-radius: 5px; background: rgb(255, 255, 255); cursor: default; @@ -1984,7 +1976,6 @@ config*.mako #config-content { display: block; - /*width: 960px;*/ padding: 0 0 40px; margin: 0 auto; clear: both; @@ -2571,44 +2562,43 @@ span.path { } .align-left { - text-align: left!important; + text-align: left !important; } .h2footer { - /*margin: -45px 0 8px;*/ line-height: 18px; clear: both; } span.quality { font: 12px/13px "Open Sans", verdana, sans-serif; - background-image:-webkit-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 50%,rgba(0,0,0,0.25)); - background-image:-moz-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 50%,rgba(0,0,0,0.25)); - background-image:-o-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 50%,rgba(0,0,0,0.25)); - background-image:linear-gradient(to bottom, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 50%,rgba(0,0,0,0.25)); - -webkit-box-shadow:inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); - box-shadow:inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); + background-image: -webkit-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 50%, rgba(0, 0, 0, 0.25)); + background-image: -moz-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 50%, rgba(0, 0, 0, 0.25)); + background-image: -o-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 50%, rgba(0, 0, 0, 0.25)); + background-image: linear-gradient(to bottom, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 50%, rgba(0, 0, 0, 0.25)); + box-shadow: inset 0 1px rgba(255, 255, 255, 0.1), inset 0 -1px 3px rgba(0, 0, 0, 0.3), inset 0 0 0 1px rgba(255, 255, 255, 0.08), 0 1px 2px rgba(0, 0, 0, 0.15); text-shadow: 0 1px rgba(0, 0, 0, 0.8); color: rgb(255, 255, 255); display: inline-block; padding: 2px 4px; text-align: center; vertical-align: baseline; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; border-radius: 4px; white-space: nowrap; } span.any-hd { background-color: rgb(38, 114, 182); - background: repeating-linear-gradient( + /* stylelint-disable declaration-block-no-shorthand-property-overrides */ + background: + repeating-linear-gradient( -45deg, rgb(38, 114, 182), rgb(38, 114, 182) 10px, rgb(91, 153, 13) 10px, rgb(91, 153, 13) 20px ); + /* stylelint-enable */ } span.Custom { @@ -2767,6 +2757,14 @@ div.blackwhitelist.manual { margin: 7px 0; } +ul.simpleList { + padding-left: 0px; +} + +ul.simpleList li { + list-style-type: none; +} + /* ======================================================================= bootstrap Overrides ========================================================================== */ @@ -2941,9 +2939,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { .dropdown-menu-custom > li > a { padding: 15px 36px 15px 5px; - border-bottom-style: solid; - border-bottom-width: 1px; - border-bottom-color: darkgrey; + border-bottom: 1px solid darkgrey; } .dropdown-menu-custom > li:last-child > a { @@ -2978,26 +2974,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { } #sub-menu-container { - 
z-index:550; + z-index: 550; min-height: 41px; } .shadow { - box-shadow: 3px 3px 3px rgba(17,17,17,0.5); + box-shadow: 3px 3px 3px rgba(17, 17, 17, 0.5); } .border-bottom { border-width: 1px; - border-style: none none solid none; + border-style: none none solid; border-color: rgb(62, 63, 58); } .btn { display: inline-block; *display: inline; - padding: 4px 10px 4px; + padding: 4px 10px; margin-bottom: 0; - *margin-left: .3em; + *margin-left: 0.3em; font-size: 12px; line-height: 16px; *line-height: 20px; @@ -3012,7 +3008,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(255, 255, 255)), to(rgb(230, 230, 230))); background-image: -webkit-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); background-image: -o-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); - background-image: linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); + background-image: linear-gradient(to top, rgb(255, 255, 255), rgb(230, 230, 230)); background-image: -moz-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); background-repeat: repeat-x; border: 1px solid rgb(204, 204, 204); @@ -3026,8 +3022,6 @@ fieldset[disabled] .navbar-default .btn-link:focus { filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ffffff', endColorstr='#e6e6e6', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); *zoom: 1; - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); } @@ -3074,8 +3068,6 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-color: rgb(217, 217, 217) \9; background-image: none; outline: 0; - -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); } @@ -3086,8 +3078,6 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: none; opacity: 0.65; filter: alpha(opacity=65); - -webkit-box-shadow: none; - -moz-box-shadow: none; box-shadow: none; } @@ -3095,8 +3085,6 @@ fieldset[disabled] .navbar-default .btn-link:focus { padding: 9px 14px; font-size: 15px; line-height: normal; - -webkit-border-radius: 5px; - -moz-border-radius: 5px; border-radius: 1px; } @@ -3153,7 +3141,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); background-image: -o-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); background-image: -moz-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); - background-image: linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); + background-image: linear-gradient(to top, rgb(0, 136, 204), rgb(0, 85, 204)); background-repeat: repeat-x; border-color: rgb(0, 85, 204) rgb(0, 85, 204) rgb(0, 53, 128); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); @@ -3183,7 +3171,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); background-image: -o-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); background-image: -moz-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); - background-image: linear-gradient(top, 
rgb(251, 180, 80), rgb(248, 148, 6)); + background-image: linear-gradient(to top, rgb(251, 180, 80), rgb(248, 148, 6)); background-repeat: repeat-x; border-color: rgb(248, 148, 6) rgb(248, 148, 6) rgb(173, 103, 4); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); @@ -3213,7 +3201,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); background-image: -o-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); background-image: -moz-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); - background-image: linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); + background-image: linear-gradient(to top, rgb(238, 95, 91), rgb(189, 54, 47)); background-repeat: repeat-x; border-color: rgb(189, 54, 47) rgb(189, 54, 47) rgb(128, 36, 32); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); @@ -3243,7 +3231,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); background-image: -o-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); background-image: -moz-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); - background-image: linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); + background-image: linear-gradient(to top, rgb(98, 196, 98), rgb(81, 163, 81)); background-repeat: repeat-x; border-color: rgb(81, 163, 81) rgb(81, 163, 81) rgb(56, 112, 56); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); @@ -3273,7 +3261,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); background-image: -o-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); background-image: -moz-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); - background-image: linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); + background-image: linear-gradient(to top, rgb(91, 192, 222), rgb(47, 150, 180)); background-repeat: repeat-x; border-color: rgb(47, 150, 180) rgb(47, 150, 180) rgb(31, 99, 119); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); @@ -3303,7 +3291,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -webkit-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); background-image: -o-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); background-image: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); - background-image: linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); + background-image: linear-gradient(to top, rgb(85, 85, 85), rgb(34, 34, 34)); background-repeat: repeat-x; border-color: rgb(34, 34, 34) rgb(34, 34, 34) rgb(0, 0, 0); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); @@ -3468,9 +3456,7 @@ div.formpaginate .next { cursor: hand; cursor: pointer; background: rgb(87, 68, 43); - -webkit-border-radius: 6px; - -moz-border-radius: 6px; - border-radius: 6px; + border-radius: 6px; } .stepDiv { @@ -3498,7 +3484,6 @@ div.formpaginate .next { font-size: 12px; font-weight: normal; display: block; - /*width: 475px;*/ margin-left: 182px; } @@ -3860,12 +3845,11 @@ jquery.confirm.css -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; - background-image: -webkit-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - background-image: -moz-linear-gradient(top, 
rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - background-image: -o-linear-gradient(top, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - background-image: linear-gradient(to bottom, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 51%,rgba(0,0,0,0.25)); - -webkit-box-shadow: inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); - box-shadow: inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); + background-image: -webkit-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + background-image: -moz-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + background-image: -o-linear-gradient(top, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + background-image: linear-gradient(to bottom, rgba(255, 255, 255, 0.08), rgba(255, 255, 255, 0) 50%, rgba(0, 0, 0, 0) 51%, rgba(0, 0, 0, 0.25)); + box-shadow: inset 0 1px rgba(255, 255, 255, 0.1), inset 0 -1px 3px rgba(0, 0, 0, 0.3), inset 0 0 0 1px rgba(255, 255, 255, 0.08), 0 1px 2px rgba(0, 0, 0, 0.15); } #confirmBox .button:last-child { @@ -4042,11 +4026,11 @@ div#searchNotification { } .left-30 { - margin-left:-30px; + margin-left: -30px; } .right-30 { - margin-right:-30px; + margin-right: -30px; } .vMiddle { @@ -4080,37 +4064,37 @@ div#searchNotification { #snatchhistory.tablesorter tbody:nth-child(2) th:nth-child(1) { width: 15%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter tbody:nth-child(2) th:nth-child(2) { width: 15%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter tbody:nth-child(2) th:nth-child(3) { width: 15%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter tbody:nth-child(2) th:nth-child(4) { width: 52%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter td:nth-child(1) { width: 15%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter td:nth-child(2) { width: 18%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter td:nth-child(3) { width: 15%; - text-align: center!important; + text-align: center !important; } #snatchhistory.tablesorter td:nth-child(4) { @@ -4123,7 +4107,7 @@ div#searchNotification { #srchresults.tablesorter td:nth-child(n+2) { width: 4%; - text-align:center; + text-align: center; } .summaryTable tr td { @@ -4186,9 +4170,8 @@ div.notepad img { } /* ======================================================================== -css helper classes -===========================================================================*/ -/* Some css helper classes */ +CSS helper classes +=========================================================================== */ .top-5 { margin-top: 5px; @@ -4242,34 +4225,37 @@ css helper classes /* sub-menu */ -@media screen and (min-width: 0px) and (max-width: 768px) { - .full-width { display: none; } /* show it on small screens */ - .mobile { display: block;} - .mobile .ui-icon {display: inline-block;} - .dropdown-menu-custom > li > a { - padding: 15px 36px 15px 5px; - border-bottom-style: solid; - 
border-bottom-width: 1px; - } - .mobile > ul > li > a { - padding-top: 15px; - padding-bottom: 15px; - padding-left: 8px; - } - .mobile .img-align { - width: 25px; - float: left; - } +@media screen and (min-width: 0) and (max-width: 768px) { + .full-width { display: none; } /* show it on small screens */ + .mobile { display: block; } + .mobile .ui-icon { display: inline-block; } + + .dropdown-menu-custom > li > a { + padding: 15px 36px 15px 5px; + border-bottom-style: solid; + border-bottom-width: 1px; + } + + .mobile > ul > li > a { + padding-top: 15px; + padding-bottom: 15px; + padding-left: 8px; + } + + .mobile .img-align { + width: 25px; + float: left; + } } @media screen and (min-width: 769px) { - .full-width {display: block; } /* show it on small screens */ - .mobile {display: none;} + .full-width { display: block; } /* show it on small screens */ + .mobile { display: none; } } /* navbar padding, to make room for in-page scrollbars */ .padding-right-15 { - padding-right: 15px!important; + padding-right: 15px !important; } .backlog-quality { @@ -4283,7 +4269,58 @@ css helper classes @media (max-width: 1700px) { #key-padding { - margin-left: 0px; + margin-left: 0; margin-right: 5px; } } + +.triggerhighlight { + cursor: default; +} + +/* ======================================================================= + Scroll to top +========================================================================== */ +.scroll-top-wrapper { + position: fixed; + opacity: 0; + visibility: hidden; + overflow: hidden; + text-align: center; + font-size: 20px; + z-index: 99999999; + background-color: #777; + color: #eee; + width: 50px; + height: 48px; + line-height: 48px; + right: 30px; + bottom: 30px; + padding-top: 2px; + border-radius: 10px; + -webkit-transition: all 0.5s ease-in-out; + -moz-transition: all 0.5s ease-in-out; + -ms-transition: all 0.5s ease-in-out; + -o-transition: all 0.5s ease-in-out; + transition: all 0.5s ease-in-out; +} + +.scroll-top-wrapper:hover { + background-color: #888; +} + +.scroll-top-wrapper.show { + visibility: visible; + cursor: pointer; + opacity: 1; +} + +.scroll-top-wrapper i.fa { + line-height: inherit; +} + +/* CSS for Nav dropdown hover +.dropdown:hover .dropdown-menu { + display: block; +} +*/ diff --git a/static/css/vender.min.css b/static/css/vender.min.css index 42c2ca9948..b91cfa5652 100644 --- a/static/css/vender.min.css +++ b/static/css/vender.min.css @@ -2,4 +2,4 @@ * Bootstrap v3.3.7 (http://getbootstrap.com) * Copyright 2011-2016 Twitter, Inc. * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */.label,sub,sup{vertical-align:baseline}.btn,.btn-group,.btn-group-vertical,.caret,.checkbox-inline,.radio-inline,img{vertical-align:middle}hr,img{border:0}body,figure{margin:0}.btn-group>.btn-group,.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar .input-group,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.dropdown-menu{float:left}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.pre-scrollable{max-height:340px}html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}b,optgroup,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0}mark{color:#000;background:#ff0}sub,sup{position:relative;font-size:75%;line-height:0}sup{top:-.5em}sub{bottom:-.25em}svg:not(:root){overflow:hidden}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre,textarea{overflow:auto}code,kbd,pre,samp{font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}.glyphicon,address{font-style:normal}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! 
Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{blockquote,img,pre,tr{page-break-inside:avoid}*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999}thead{display:table-header-group}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}.dropdown-menu,.modal-content{-webkit-background-clip:padding-box}.btn,.btn-danger.active,.btn-danger:active,.btn-default.active,.btn-default:active,.btn-info.active,.btn-info:active,.btn-primary.active,.btn-primary:active,.btn-warning.active,.btn-warning:active,.btn.active,.btn:active,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover,.form-control,.navbar-toggle,.open>.dropdown-toggle.btn-danger,.open>.dropdown-toggle.btn-default,.open>.dropdown-toggle.btn-info,.open>.dropdown-toggle.btn-primary,.open>.dropdown-toggle.btn-warning{background-image:none}.img-thumbnail,body{background-color:#fff}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077
"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-
log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before,.glyphicon-btc:before,.glyphicon-xbt:before{content:"\e227"}.glyphicon-jpy:before,.glyphicon-yen:before{content:"\00a5"}.glyphicon-rub:before,.glyphicon-ruble:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-
object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*,:after,:before{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:transparent}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:-webkit-focus-ring-color auto 5px;outline-offset:-2px}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}dt,kbd kbd,label{font-weight:700}address,blockquote .small,blockquote footer,blockquote small,dd,dt,pre{line-height:1.42857143}@media 
(min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.list-inline,.list-unstyled{padding-left:0;list-style:none}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}pre code,table{background-color:transparent}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}dl,ol,ul{margin-top:0}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child,ol ol,ol ul,ul ol,ul ul{margin-bottom:0}address,dl{margin-bottom:20px}ol,ul{margin-bottom:10px}.list-inline{margin-left:-5px}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}.container{width:750px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;color:#777}legend,pre{display:block;color:#333}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}code,kbd{padding:2px 4px;font-size:90%}caption,th{text-align:left}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;-webkit-box-shadow:none;box-shadow:none}pre{padding:9.5px;margin:0 0 
10px;font-size:13px;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}.container,.container-fluid{margin-right:auto;margin-left:auto}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;border-radius:0}.container,.container-fluid{padding-right:15px;padding-left:15px}.pre-scrollable{overflow-y:scroll}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}caption{padding-top:8px;padding-bottom:8px;color:#777}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered,.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover,.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>thead>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset,legend{padding:0;border:0}fieldset{min-width:0;margin:0}legend{width:100%;margin-bottom:20px;font-size:21px;line-height:inherit;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-appearance:none}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}.form-control,output{font-size:14px;line-height:1.42857143;color:#555;display:block}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:-webkit-focus-ring-color auto 5px;outline-offset:-2px}output{padding-top:7px}.form-control{width:100%;height:34px;padding:6px 12px;background-color:#fff;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .form-control-feedback,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}@media screen and 
(-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=datetime-local].form-control,input[type=month].form-control,input[type=time].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],.input-group-sm input[type=time],input[type=date].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm,input[type=time].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=datetime-local],.input-group-lg input[type=month],.input-group-lg input[type=time],input[type=date].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg,input[type=time].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}.checkbox-inline.disabled,.checkbox.disabled label,.radio-inline.disabled,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio label,fieldset[disabled] .radio-inline,fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.form-group-sm .form-control,.input-sm{padding:5px 10px;border-radius:3px;font-size:12px}.input-sm{height:30px;line-height:1.5}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;line-height:1.5}.form-group-lg .form-control,.input-lg{border-radius:6px;padding:10px 16px;font-size:18px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;line-height:1.3333333}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;line-height:1.3333333}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.collapsing,.dropdown,.dropup{position:relative}.form-group-lg 
.form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .form-control-feedback,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .form-control-feedback,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-control-static,.form-inline .form-group{display:inline-block}.form-inline .control-label,.form-inline .form-group{margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal 
.radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:-webkit-focus-ring-color auto 5px;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.btn-default:hover,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default .badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary.active,.btn-primary:active,.btn-primary:hover,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success.active,.btn-success:active,.btn-success:hover,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success .badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info.active,.btn-info:active,.btn-info:hover,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info .badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.btn-warning:hover,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger.active,.btn-danger:active,.btn-danger:hover,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu-right,.dropdown-menu.pull-right{right:0;left:auto}.dropdown-header,.dropdown-menu>li>a{display:block;padding:3px 
20px;line-height:1.42857143;white-space:nowrap}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle,.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child,.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child),.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn,.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{clear:both;font-weight:400;color:#333}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-left{right:auto;left:0}.dropdown-header{font-size:12px;color:#777}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.nav-justified>.dropdown .dropdown-menu,.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn .caret,.btn-group>.btn:first-child{margin-left:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group .form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.nav>li,.nav>li>a{display:block;position:relative}.input-group 
.form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li>a{padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center;margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0;border-bottom:1px solid #ddd;border-radius:4px 4px 0 
0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-justified>li,.nav-stacked>li{float:none}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar{border-radius:4px}.navbar-header{float:left}.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.embed-responsive,.modal,.modal-open,.progress{overflow:hidden}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}.navbar-static-top{z-index:1000;border-width:0 0 1px}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}.navbar-fixed-bottom,.navbar-fixed-top,.navbar-static-top{border-radius:0}.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}.navbar-nav{margin:7.5px 
-15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}.progress-bar-striped,.progress-striped .progress-bar,.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}@media (min-width:768px){.navbar-toggle{display:none}.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-control-static,.navbar-form .form-group{display:inline-block}.navbar-form .control-label,.navbar-form .form-group{margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.breadcrumb>li,.pagination{display:inline-block}.btn .badge,.btn .label{top:-1px;position:relative}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom .navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default 
.navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-nav>li>a,.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>li>a,.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu 
.divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{padding-left:0;margin:20px 0;border-radius:4px}.pager li,.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid #ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.badge,.label{font-weight:700;line-height:1;white-space:nowrap;text-align:center}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager 
li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;color:#fff;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;color:#fff;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.media-object,.thumbnail{display:block}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.jumbotron,.jumbotron .h1,.jumbotron h1{color:inherit}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;background-color:#eee}.jumbotron p{margin-bottom:15px;font-size:21px;font-weight:200}.alert,.thumbnail{margin-bottom:20px}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.modal,.modal-backdrop{top:0;right:0;bottom:0;left:0}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info 
.alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-striped .progress-bar-info,.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 
75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover .list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger .list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.panel-heading>.dropdown .dropdown-toggle,.panel-title,.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-title,.panel>.list-group,.panel>.panel-collapse>.list-group,.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-title{margin-top:0;font-size:16px}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel-group 
.panel-heading,.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group .list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.list-group+.panel-footer,.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child 
th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading .badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading 
.badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading .badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.popover,.tooltip{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:400;line-height:1.42857143;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;line-break:auto;text-decoration:none;white-space:normal}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal{position:fixed;z-index:1050;display:none;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.carousel-control,.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer 
.btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}.tooltip.top-left .tooltip-arrow,.tooltip.top-right .tooltip-arrow{bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-size:12px;text-align:left;text-align:start;filter:alpha(opacity=0);opacity:0}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px}.tooltip.top-right .tooltip-arrow{left:5px}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow,.tooltip.bottom-left .tooltip-arrow,.tooltip.bottom-right .tooltip-arrow{border-width:0 5px 5px;border-bottom-color:#000;top:0}.tooltip.bottom .tooltip-arrow{left:50%;margin-left:-5px}.tooltip.bottom-left .tooltip-arrow{right:5px;margin-top:-5px}.tooltip.bottom-right .tooltip-arrow{left:5px;margin-top:-5px}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-size:14px;text-align:left;text-align:start;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2)}.carousel-caption,.carousel-control{color:#fff;text-shadow:0 1px 2px rgba(0,0,0,.6);text-align:center}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:5px 5px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.carousel,.carousel-inner{position:relative}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.left>.arrow:after,.popover.right>.arrow:after{bottom:-10px;content:" 
"}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{left:1px;border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;border-right-width:0;border-left-color:#fff}.carousel-inner{width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;background-color:rgba(0,0,0,0)}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control 
.icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000\9;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px}.carousel-caption .btn,.text-hide{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.hidden,.visible-lg,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;background-color:transparent;border:0}div.token-input-dropdown,ul.token-input-list{overflow:hidden;font-size:12px;font-family:Verdana,sans-serif}.affix{position:fixed}@-ms-viewport{width:device-width}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}.visible-xs-block{display:block!important}.visible-xs-inline{display:inline!important}.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}.visible-sm-block{display:block!important}.visible-sm-inline{display:inline!important}.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}.visible-md-block{display:block!important}.visible-md-inline{display:inline!important}.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}.visible-lg-block{display:block!important}.visible-lg-inline{display:inline!important}.visible-lg-inline-block{display:inline-block!important}.hidden-lg{display:none!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}.hidden-print{display:none!important}}ul.token-input-list{height:auto!important;height:1%;width:400px;border:1px solid #999;cursor:text;z-index:999;margin:0;padding:0;background-color:#fff;list-style-type:none;clear:left}ul.token-input-list li{list-style-type:none}ul.token-input-list li input{border:0;width:350px;padding:3px 8px;background-color:#fff;-webkit-appearance:caret}ul.token-input-disabled,ul.token-input-disabled li input{background-color:#E8E8E8}ul.token-input-disabled li.token-input-token{background-color:#D9E3CA;color:#7D7D7D}ul.token-input-disabled li.token-input-token span{color:#CFCFCF;cursor:default}li.token-input-token{overflow:hidden;height:auto!important;height:1%;margin:3px;padding:3px 5px;background-color:#d0efa0;color:#000;font-weight:700;cursor:default;display:block}li.token-input-token p{float:left;padding:0;margin:0}li.token-input-token span{float:right;color:#777;cursor:pointer}li.token-input-selected-token{background-color:#08844e;color:#fff}li.token-input-selected-token span{color:#bbb}div.token-input-dropdown{position:absolute;width:400px;background-color:#fff;border-left:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc;cursor:default;z-index:1}div.token-input-dropdown 
p{margin:0;padding:5px;font-weight:700;color:#777}div.token-input-dropdown ul{margin:0;padding:0}div.token-input-dropdown ul li{background-color:#fff;padding:3px;list-style-type:none}div.token-input-dropdown ul li.token-input-dropdown-item{background-color:#fafafa}div.token-input-dropdown ul li.token-input-dropdown-item2{background-color:#fff}div.token-input-dropdown ul li em{font-weight:700;font-style:normal}div.token-input-dropdown ul li.token-input-selected-dropdown-item{background-color:#d0efa0}@font-face{font-family:'Open Sans';font-style:normal;font-weight:300;src:local('Open Sans Light'),local('OpenSans-Light'),url(1ORHCpsQm3Vp6mXoaTYnF5uFdDttMLvmWuJdhhgs.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:normal;font-weight:400;src:local('Open Sans'),local('OpenSans'),url(KeOuBrn4kERxqtaUH3aCWcynf_cDxXwCLxiixG1c.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:normal;font-weight:600;src:local('Open Sans Semibold'),local('OpenSans-Semibold'),url(_ySUJH_bn48VBG8sNSonF5uFdDttMLvmWuJdhhgs.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:normal;font-weight:700;src:local('Open Sans Bold'),local('OpenSans-Bold'),url(702ZOKiLJc3WVjuplzInF5uFdDttMLvmWuJdhhgs.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:normal;font-weight:800;src:local('Open Sans Extrabold'),local('OpenSans-Extrabold'),url(bV5DfGHOiMmvb1Xr-honF5uFdDttMLvmWuJdhhgs.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:italic;font-weight:300;src:local('Open Sans Light Italic'),local('OpenSansLight-Italic'),url(iXeptR36kaC0GEAetxrfB31yxOzP-czbf6AAKCVo.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:italic;font-weight:400;src:local('Open Sans Italic'),local('OpenSans-Italic'),url(JXh38I15wypJXxuGMBp0EAVxt0G0biEntp43Qt6E.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:italic;font-weight:600;src:local('Open Sans Semibold Italic'),local('OpenSans-SemiboldItalic'),url(iXeptR36kaC0GEAetxi8cqLH4MEiSE0ROcU-qHOA.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:italic;font-weight:700;src:local('Open Sans Bold Italic'),local('OpenSans-BoldItalic'),url(iXeptR36kaC0GEAetxp_TkvowlIOtbR7ePgFOpF4.ttf) format('truetype')}@font-face{font-family:'Open Sans';font-style:italic;font-weight:800;src:local('Open Sans Extrabold Italic'),local('OpenSans-ExtraboldItalic'),url(iXeptR36kaC0GEAetxlDMrAYtoOisqqMDW9M_Mqc.ttf) format('truetype')}.ui-pnotify{top:25px;right:25px;position:absolute;height:auto;z-index:9999}html>body>.ui-pnotify{position:fixed}.ui-pnotify .ui-pnotify-shadow{-webkit-box-shadow:0 2px 10px rgba(50,50,50,.5);-moz-box-shadow:0 2px 10px rgba(50,50,50,.5);box-shadow:0 2px 10px rgba(50,50,50,.5)}.ui-pnotify-container{background-position:0 0;padding:.8em;height:100%;margin:0}.ui-pnotify-container.ui-pnotify-sharp{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.ui-pnotify-title{display:block;margin-bottom:.4em;margin-top:0}.ui-pnotify-text{display:block}.ui-pnotify-icon,.ui-pnotify-icon 
span{display:block;float:left;margin-right:.2em}.ui-pnotify.stack-bottomleft,.ui-pnotify.stack-topleft{left:25px;right:auto}.ui-pnotify.stack-bottomleft,.ui-pnotify.stack-bottomright{bottom:25px;top:auto}.ui-pnotify-closer,.ui-pnotify-sticker{float:right;margin-left:.2em}.ui-pnotify-history-container{position:absolute;top:0;right:18px;width:70px;border-top:none;padding:0;-webkit-border-top-left-radius:0;-moz-border-top-left-radius:0;border-top-left-radius:0;-webkit-border-top-right-radius:0;-moz-border-top-right-radius:0;border-top-right-radius:0;z-index:10000}.ui-pnotify-history-container.ui-pnotify-history-fixed{position:fixed}.ui-pnotify-history-container .ui-pnotify-history-header{padding:2px;text-align:center}.ui-pnotify-history-container button{cursor:pointer;display:block;width:100%}.ui-pnotify-history-container .ui-pnotify-history-pulldown{display:block;margin:0 auto}.tablesorter-blue{width:100%;background-color:#fff;margin:10px 0 15px;text-align:left;border-spacing:0;border:1px solid #cdcdcd;border-width:1px 0 0 1px}.tablesorter-blue td,.tablesorter-blue th{border:1px solid #cdcdcd;border-width:0 1px 1px 0}.tablesorter-blue th,.tablesorter-blue thead td{font:12px/18px Arial,Sans-serif;font-weight:700;color:#000;background-color:#99bfe6;border-collapse:collapse;padding:4px;text-shadow:0 1px 0 rgba(204,204,204,.7)}.tablesorter-blue tbody td,.tablesorter-blue tfoot td,.tablesorter-blue tfoot th{padding:4px;vertical-align:top}.tablesorter-blue .header,.tablesorter-blue .tablesorter-header{background-image:url(data:image/gif;base64,R0lGODlhFQAJAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAkAAAIXjI+AywnaYnhUMoqt3gZXPmVg94yJVQAAOw==);background-repeat:no-repeat;background-position:center right;padding:4px 18px 4px 4px;white-space:normal;cursor:pointer}.tablesorter-blue .headerSortUp,.tablesorter-blue .tablesorter-headerAsc,.tablesorter-blue .tablesorter-headerSortUp{background-color:#9fbfdf;background-image:url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7)}.tablesorter-blue .headerSortDown,.tablesorter-blue .tablesorter-headerDesc,.tablesorter-blue .tablesorter-headerSortDown{background-color:#8cb3d9;background-image:url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7)}.tablesorter-blue thead .sorter-false{background-image:none;cursor:default;padding:4px}.tablesorter-blue tfoot .tablesorter-headerAsc,.tablesorter-blue tfoot .tablesorter-headerDesc,.tablesorter-blue tfoot .tablesorter-headerSortDown,.tablesorter-blue tfoot .tablesorter-headerSortUp{background-image:none}.tablesorter-blue td{color:#3d3d3d;background-color:#fff;padding:4px;vertical-align:top}.tablesorter-blue tbody>tr.even.hover>td,.tablesorter-blue tbody>tr.even:hover+tr.tablesorter-childRow+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.even:hover+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.even:hover>td,.tablesorter-blue tbody>tr.hover>td,.tablesorter-blue tbody>tr:hover+tr.tablesorter-childRow+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr:hover+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr:hover>td{background-color:#d9d9d9}.tablesorter-blue tbody>tr.odd.hover>td,.tablesorter-blue tbody>tr.odd:hover+tr.tablesorter-childRow+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.odd:hover+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.odd:hover>td{background-color:#bfbfbf}.tablesorter-blue .tablesorter-processing{background-position:center 
center!important;background-repeat:no-repeat!important;background-image:url(data:image/gif;base64,R0lGODlhFAAUAKEAAO7u7lpaWgAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQBCgACACwAAAAAFAAUAAACQZRvoIDtu1wLQUAlqKTVxqwhXIiBnDg6Y4eyx4lKW5XK7wrLeK3vbq8J2W4T4e1nMhpWrZCTt3xKZ8kgsggdJmUFACH5BAEKAAIALAcAAAALAAcAAAIUVB6ii7jajgCAuUmtovxtXnmdUAAAIfkEAQoAAgAsDQACAAcACwAAAhRUIpmHy/3gUVQAQO9NetuugCFWAAAh+QQBCgACACwNAAcABwALAAACE5QVcZjKbVo6ck2AF95m5/6BSwEAIfkEAQoAAgAsBwANAAsABwAAAhOUH3kr6QaAcSrGWe1VQl+mMUIBACH5BAEKAAIALAIADQALAAcAAAIUlICmh7ncTAgqijkruDiv7n2YUAAAIfkEAQoAAgAsAAAHAAcACwAAAhQUIGmHyedehIoqFXLKfPOAaZdWAAAh+QQFCgACACwAAAIABwALAAACFJQFcJiXb15zLYRl7cla8OtlGGgUADs=)!important}.tablesorter-blue tbody tr.odd>td{background-color:#ebf2fa}.tablesorter-blue tbody tr.even>td{background-color:#fff}.tablesorter-blue td.primary,.tablesorter-blue tr.odd td.primary{background-color:#99b3e6}.tablesorter-blue td.secondary,.tablesorter-blue tr.even td.primary,.tablesorter-blue tr.odd td.secondary{background-color:#c2d1f0}.tablesorter-blue td.tertiary,.tablesorter-blue tr.even td.secondary,.tablesorter-blue tr.odd td.tertiary{background-color:#d6e0f5}.tablesorter-blue tr.even td.tertiary{background-color:#ebf0fa}caption{background-color:#fff}.tablesorter-blue .tablesorter-filter-row{background-color:#eee}.tablesorter-blue .tablesorter-filter-row td{background-color:#eee;line-height:normal;text-align:center;-webkit-transition:line-height .1s ease;-moz-transition:line-height .1s ease;-o-transition:line-height .1s ease;transition:line-height .1s ease}.tablesorter-blue .tablesorter-filter-row .disabled{opacity:.5;filter:alpha(opacity=50);cursor:not-allowed}.tablesorter-blue .tablesorter-filter-row.hideme td{padding:2px;margin:0;line-height:0;cursor:pointer}.tablesorter-blue .tablesorter-filter-row.hideme *{height:1px;min-height:0;border:0;padding:0;margin:0;opacity:0;filter:alpha(opacity=0)}.tablesorter-blue input.tablesorter-filter,.tablesorter-blue select.tablesorter-filter{width:98%;height:auto;margin:0;padding:4px;background-color:#fff;border:1px solid #bbb;color:#333;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:height .1s ease;-moz-transition:height .1s ease;-o-transition:height .1s ease;transition:height .1s ease}.tablesorter .filtered{display:none}.tablesorter .tablesorter-errorRow td{text-align:center;cursor:pointer;background-color:#e6bf99} + *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */.label,sub,sup{vertical-align:baseline}.btn,.btn-group,.btn-group-vertical,.caret,.checkbox-inline,.radio-inline,img{vertical-align:middle}hr,img{border:0}body,figure{margin:0}.btn-group>.btn-group,.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar .input-group,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.dropdown-menu{float:left}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.pre-scrollable{max-height:340px}html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}b,optgroup,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0}mark{color:#000;background:#ff0}sub,sup{position:relative;font-size:75%;line-height:0}sup{top:-.5em}sub{bottom:-.25em}svg:not(:root){overflow:hidden}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre,textarea{overflow:auto}code,kbd,pre,samp{font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}.glyphicon,address{font-style:normal}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! 
Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{blockquote,img,pre,tr{page-break-inside:avoid}*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999}thead{display:table-header-group}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}.dropdown-menu,.modal-content{-webkit-background-clip:padding-box}.btn,.btn-danger.active,.btn-danger:active,.btn-default.active,.btn-default:active,.btn-info.active,.btn-info:active,.btn-primary.active,.btn-primary:active,.btn-warning.active,.btn-warning:active,.btn.active,.btn:active,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover,.form-control,.navbar-toggle,.open>.dropdown-toggle.btn-danger,.open>.dropdown-toggle.btn-default,.open>.dropdown-toggle.btn-info,.open>.dropdown-toggle.btn-primary,.open>.dropdown-toggle.btn-warning{background-image:none}.img-thumbnail,body{background-color:#fff}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077
"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-
log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before,.glyphicon-btc:before,.glyphicon-xbt:before{content:"\e227"}.glyphicon-jpy:before,.glyphicon-yen:before{content:"\00a5"}.glyphicon-rub:before,.glyphicon-ruble:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-
object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*,:after,:before{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:transparent}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:-webkit-focus-ring-color auto 5px;outline-offset:-2px}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}dt,kbd kbd,label{font-weight:700}address,blockquote .small,blockquote footer,blockquote small,dd,dt,pre{line-height:1.42857143}@media 
(min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.list-inline,.list-unstyled{padding-left:0;list-style:none}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}pre code,table{background-color:transparent}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}dl,ol,ul{margin-top:0}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child,ol ol,ol ul,ul ol,ul ul{margin-bottom:0}address,dl{margin-bottom:20px}ol,ul{margin-bottom:10px}.list-inline{margin-left:-5px}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}.container{width:750px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;color:#777}legend,pre{display:block;color:#333}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}code,kbd{padding:2px 4px;font-size:90%}caption,th{text-align:left}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;-webkit-box-shadow:none;box-shadow:none}pre{padding:9.5px;margin:0 0 
10px;font-size:13px;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}.container,.container-fluid{margin-right:auto;margin-left:auto}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;border-radius:0}.container,.container-fluid{padding-right:15px;padding-left:15px}.pre-scrollable{overflow-y:scroll}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}caption{padding-top:8px;padding-bottom:8px;color:#777}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered,.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover,.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>thead>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset,legend{padding:0;border:0}fieldset{min-width:0;margin:0}legend{width:100%;margin-bottom:20px;font-size:21px;line-height:inherit;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-appearance:none}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}.form-control,output{font-size:14px;line-height:1.42857143;color:#555;display:block}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:-webkit-focus-ring-color auto 5px;outline-offset:-2px}output{padding-top:7px}.form-control{width:100%;height:34px;padding:6px 12px;background-color:#fff;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .form-control-feedback,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}@media screen and 
(-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=datetime-local].form-control,input[type=month].form-control,input[type=time].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],.input-group-sm input[type=time],input[type=date].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm,input[type=time].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=datetime-local],.input-group-lg input[type=month],.input-group-lg input[type=time],input[type=date].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg,input[type=time].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}.checkbox-inline.disabled,.checkbox.disabled label,.radio-inline.disabled,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio label,fieldset[disabled] .radio-inline,fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.form-group-sm .form-control,.input-sm{padding:5px 10px;border-radius:3px;font-size:12px}.input-sm{height:30px;line-height:1.5}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;line-height:1.5}.form-group-lg .form-control,.input-lg{border-radius:6px;padding:10px 16px;font-size:18px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;line-height:1.3333333}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;line-height:1.3333333}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.collapsing,.dropdown,.dropup{position:relative}.form-group-lg 
.form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .form-control-feedback,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .form-control-feedback,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-control-static,.form-inline .form-group{display:inline-block}.form-inline .control-label,.form-inline .form-group{margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal 
.radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:-webkit-focus-ring-color auto 5px;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.btn-default:hover,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default .badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary.active,.btn-primary:active,.btn-primary:hover,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success.active,.btn-success:active,.btn-success:hover,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success .badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info.active,.btn-info:active,.btn-info:hover,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info .badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.btn-warning:hover,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger.active,.btn-danger:active,.btn-danger:hover,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu-right,.dropdown-menu.pull-right{right:0;left:auto}.dropdown-header,.dropdown-menu>li>a{display:block;padding:3px 
20px;line-height:1.42857143;white-space:nowrap}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle,.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child,.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child),.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn,.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{clear:both;font-weight:400;color:#333}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-left{right:auto;left:0}.dropdown-header{font-size:12px;color:#777}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.nav-justified>.dropdown .dropdown-menu,.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn .caret,.btn-group>.btn:first-child{margin-left:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group .form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.nav>li,.nav>li>a{display:block;position:relative}.input-group 
.form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li>a{padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center;margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0;border-bottom:1px solid #ddd;border-radius:4px 4px 0 
0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-justified>li,.nav-stacked>li{float:none}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar{border-radius:4px}.navbar-header{float:left}.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.embed-responsive,.modal,.modal-open,.progress{overflow:hidden}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}.navbar-static-top{z-index:1000;border-width:0 0 1px}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}.navbar-fixed-bottom,.navbar-fixed-top,.navbar-static-top{border-radius:0}.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}.navbar-nav{margin:7.5px 
-15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}.progress-bar-striped,.progress-striped .progress-bar,.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}@media (min-width:768px){.navbar-toggle{display:none}.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-control-static,.navbar-form .form-group{display:inline-block}.navbar-form .control-label,.navbar-form .form-group{margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.breadcrumb>li,.pagination{display:inline-block}.btn .badge,.btn .label{top:-1px;position:relative}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom .navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.media,.navbar-text{margin-top:15px}.navbar-text{margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default 
.navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-nav>li>a,.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>li>a,.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open 
.dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{padding-left:0;margin:20px 0;border-radius:4px}.pager li,.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid #ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.badge,.label{font-weight:700;line-height:1;white-space:nowrap;text-align:center}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager 
li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;color:#fff;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;color:#fff;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.media-object,.thumbnail{display:block}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.jumbotron,.jumbotron .h1,.jumbotron h1{color:inherit}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;background-color:#eee}.jumbotron p{margin-bottom:15px;font-size:21px;font-weight:200}.alert,.thumbnail{margin-bottom:20px}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.modal,.modal-backdrop{top:0;right:0;bottom:0;left:0}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info 
.alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-striped .progress-bar-info,.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 
75%,transparent)}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover .list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger .list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.panel-heading>.dropdown .dropdown-toggle,.panel-title,.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-title,.panel>.list-group,.panel>.panel-collapse>.list-group,.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-title{margin-top:0;font-size:16px}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel-group 
.panel-heading,.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group .list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.list-group+.panel-footer,.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child 
th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading .badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading 
.badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading .badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.popover,.tooltip{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:400;line-height:1.42857143;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;line-break:auto;text-decoration:none;white-space:normal}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal{position:fixed;z-index:1050;display:none;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer 
.btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-size:12px;text-align:left;text-align:start;filter:alpha(opacity=0);opacity:0}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow,.tooltip.top-left .tooltip-arrow,.tooltip.top-right .tooltip-arrow{bottom:0;border-width:5px 5px 0;border-top-color:#000}.tooltip.top .tooltip-arrow{left:50%;margin-left:-5px}.tooltip.top-left .tooltip-arrow{right:5px;margin-bottom:-5px}.tooltip.top-right .tooltip-arrow{left:5px;margin-bottom:-5px}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow,.tooltip.bottom-left .tooltip-arrow,.tooltip.bottom-right .tooltip-arrow{border-width:0 5px 5px;border-bottom-color:#000;top:0}.tooltip.bottom .tooltip-arrow{left:50%;margin-left:-5px}.tooltip.bottom-left .tooltip-arrow{right:5px;margin-top:-5px}.tooltip.bottom-right .tooltip-arrow{left:5px;margin-top:-5px}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-size:14px;text-align:left;text-align:start;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2)}.carousel-caption,.carousel-control{color:#fff;text-shadow:0 1px 2px rgba(0,0,0,.6);text-align:center}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.carousel,.carousel-inner{position:relative}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.left>.arrow:after,.popover.right>.arrow:after{bottom:-10px;content:" 
"}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{left:1px;border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;border-right-width:0;border-left-color:#fff}.carousel-inner{width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control 
.glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000\9;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px}.carousel-caption .btn,.text-hide{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.hidden,.visible-lg,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;background-color:transparent;border:0}div.token-input-dropdown,ul.token-input-list{overflow:hidden;font-size:12px;font-family:Verdana,sans-serif}.affix{position:fixed}@-ms-viewport{width:device-width}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}.visible-xs-block{display:block!important}.visible-xs-inline{display:inline!important}.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}.visible-sm-block{display:block!important}.visible-sm-inline{display:inline!important}.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}.visible-md-block{display:block!important}.visible-md-inline{display:inline!important}.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}.visible-lg-block{display:block!important}.visible-lg-inline{display:inline!important}.visible-lg-inline-block{display:inline-block!important}.hidden-lg{display:none!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}.hidden-print{display:none!important}}ul.token-input-list{height:auto!important;height:1%;width:400px;border:1px solid #999;cursor:text;z-index:999;margin:0;padding:0;background-color:#fff;list-style-type:none;clear:left}ul.token-input-list li{list-style-type:none}ul.token-input-list li input{border:0;width:350px;padding:3px 8px;background-color:#fff;-webkit-appearance:caret}ul.token-input-disabled,ul.token-input-disabled li input{background-color:#E8E8E8}ul.token-input-disabled li.token-input-token{background-color:#D9E3CA;color:#7D7D7D}ul.token-input-disabled li.token-input-token span{color:#CFCFCF;cursor:default}li.token-input-token{overflow:hidden;height:auto!important;height:1%;margin:3px;padding:3px 5px;background-color:#d0efa0;color:#000;font-weight:700;cursor:default;display:block}li.token-input-token p{float:left;padding:0;margin:0}li.token-input-token span{float:right;color:#777;cursor:pointer}li.token-input-selected-token{background-color:#08844e;color:#fff}li.token-input-selected-token span{color:#bbb}div.token-input-dropdown{position:absolute;width:400px;background-color:#fff;border-left:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc;cursor:default;z-index:1}div.token-input-dropdown 
p{margin:0;padding:5px;font-weight:700;color:#777}div.token-input-dropdown ul{margin:0;padding:0}div.token-input-dropdown ul li{background-color:#fff;padding:3px;list-style-type:none}div.token-input-dropdown ul li.token-input-dropdown-item{background-color:#fafafa}div.token-input-dropdown ul li.token-input-dropdown-item2{background-color:#fff}div.token-input-dropdown ul li em{font-weight:700;font-style:normal}div.token-input-dropdown ul li.token-input-selected-dropdown-item{background-color:#d0efa0}.ui-pnotify{top:25px;right:25px;position:absolute;height:auto;z-index:9999}html>body>.ui-pnotify{position:fixed}.ui-pnotify .ui-pnotify-shadow{-webkit-box-shadow:0 2px 10px rgba(50,50,50,.5);-moz-box-shadow:0 2px 10px rgba(50,50,50,.5);box-shadow:0 2px 10px rgba(50,50,50,.5)}.ui-pnotify-container{background-position:0 0;padding:.8em;height:100%;margin:0}.ui-pnotify-container.ui-pnotify-sharp{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.ui-pnotify-title{display:block;margin-bottom:.4em;margin-top:0}.ui-pnotify-text{display:block}.ui-pnotify-icon,.ui-pnotify-icon span{display:block;float:left;margin-right:.2em}.ui-pnotify.stack-bottomleft,.ui-pnotify.stack-topleft{left:25px;right:auto}.ui-pnotify.stack-bottomleft,.ui-pnotify.stack-bottomright{bottom:25px;top:auto}.ui-pnotify-closer,.ui-pnotify-sticker{float:right;margin-left:.2em}.ui-pnotify-history-container{position:absolute;top:0;right:18px;width:70px;border-top:none;padding:0;-webkit-border-top-left-radius:0;-moz-border-top-left-radius:0;border-top-left-radius:0;-webkit-border-top-right-radius:0;-moz-border-top-right-radius:0;border-top-right-radius:0;z-index:10000}.ui-pnotify-history-container.ui-pnotify-history-fixed{position:fixed}.ui-pnotify-history-container .ui-pnotify-history-header{padding:2px;text-align:center}.ui-pnotify-history-container button{cursor:pointer;display:block;width:100%}.ui-pnotify-history-container .ui-pnotify-history-pulldown{display:block;margin:0 auto}.tablesorter-blue{width:100%;background-color:#fff;margin:10px 0 15px;text-align:left;border-spacing:0;border:1px solid #cdcdcd;border-width:1px 0 0 1px}.tablesorter-blue td,.tablesorter-blue th{border:1px solid #cdcdcd;border-width:0 1px 1px 0}.tablesorter-blue th,.tablesorter-blue thead td{font:12px/18px Arial,Sans-serif;font-weight:700;color:#000;background-color:#99bfe6;border-collapse:collapse;padding:4px;text-shadow:0 1px 0 rgba(204,204,204,.7)}.tablesorter-blue tbody td,.tablesorter-blue tfoot td,.tablesorter-blue tfoot th{padding:4px;vertical-align:top}.tablesorter-blue .header,.tablesorter-blue .tablesorter-header{background-image:url(data:image/gif;base64,R0lGODlhFQAJAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAkAAAIXjI+AywnaYnhUMoqt3gZXPmVg94yJVQAAOw==);background-repeat:no-repeat;background-position:center right;padding:4px 18px 4px 4px;white-space:normal;cursor:pointer}.tablesorter-blue .headerSortUp,.tablesorter-blue .tablesorter-headerAsc,.tablesorter-blue .tablesorter-headerSortUp{background-color:#9fbfdf;background-image:url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7)}.tablesorter-blue .headerSortDown,.tablesorter-blue .tablesorter-headerDesc,.tablesorter-blue .tablesorter-headerSortDown{background-color:#8cb3d9;background-image:url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7)}.tablesorter-blue thead .sorter-false{background-image:none;cursor:default;padding:4px}.tablesorter-blue tfoot .tablesorter-headerAsc,.tablesorter-blue tfoot 
.tablesorter-headerDesc,.tablesorter-blue tfoot .tablesorter-headerSortDown,.tablesorter-blue tfoot .tablesorter-headerSortUp{background-image:none}.tablesorter-blue td{color:#3d3d3d;background-color:#fff;padding:4px;vertical-align:top}.tablesorter-blue tbody>tr.even.hover>td,.tablesorter-blue tbody>tr.even:hover+tr.tablesorter-childRow+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.even:hover+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.even:hover>td,.tablesorter-blue tbody>tr.hover>td,.tablesorter-blue tbody>tr:hover+tr.tablesorter-childRow+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr:hover+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr:hover>td{background-color:#d9d9d9}.tablesorter-blue tbody>tr.odd.hover>td,.tablesorter-blue tbody>tr.odd:hover+tr.tablesorter-childRow+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.odd:hover+tr.tablesorter-childRow>td,.tablesorter-blue tbody>tr.odd:hover>td{background-color:#bfbfbf}.tablesorter-blue .tablesorter-processing{background-position:center center!important;background-repeat:no-repeat!important;background-image:url(data:image/gif;base64,R0lGODlhFAAUAKEAAO7u7lpaWgAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQBCgACACwAAAAAFAAUAAACQZRvoIDtu1wLQUAlqKTVxqwhXIiBnDg6Y4eyx4lKW5XK7wrLeK3vbq8J2W4T4e1nMhpWrZCTt3xKZ8kgsggdJmUFACH5BAEKAAIALAcAAAALAAcAAAIUVB6ii7jajgCAuUmtovxtXnmdUAAAIfkEAQoAAgAsDQACAAcACwAAAhRUIpmHy/3gUVQAQO9NetuugCFWAAAh+QQBCgACACwNAAcABwALAAACE5QVcZjKbVo6ck2AF95m5/6BSwEAIfkEAQoAAgAsBwANAAsABwAAAhOUH3kr6QaAcSrGWe1VQl+mMUIBACH5BAEKAAIALAIADQALAAcAAAIUlICmh7ncTAgqijkruDiv7n2YUAAAIfkEAQoAAgAsAAAHAAcACwAAAhQUIGmHyedehIoqFXLKfPOAaZdWAAAh+QQFCgACACwAAAIABwALAAACFJQFcJiXb15zLYRl7cla8OtlGGgUADs=)!important}.tablesorter-blue tbody tr.odd>td{background-color:#ebf2fa}.tablesorter-blue tbody tr.even>td{background-color:#fff}.tablesorter-blue td.primary,.tablesorter-blue tr.odd td.primary{background-color:#99b3e6}.tablesorter-blue td.secondary,.tablesorter-blue tr.even td.primary,.tablesorter-blue tr.odd td.secondary{background-color:#c2d1f0}.tablesorter-blue td.tertiary,.tablesorter-blue tr.even td.secondary,.tablesorter-blue tr.odd td.tertiary{background-color:#d6e0f5}.tablesorter-blue tr.even td.tertiary{background-color:#ebf0fa}caption{background-color:#fff}.tablesorter-blue .tablesorter-filter-row{background-color:#eee}.tablesorter-blue .tablesorter-filter-row td{background-color:#eee;line-height:normal;text-align:center;-webkit-transition:line-height .1s ease;-moz-transition:line-height .1s ease;-o-transition:line-height .1s ease;transition:line-height .1s ease}.tablesorter-blue .tablesorter-filter-row .disabled{opacity:.5;filter:alpha(opacity=50);cursor:not-allowed}.tablesorter-blue .tablesorter-filter-row.hideme td{padding:2px;margin:0;line-height:0;cursor:pointer}.tablesorter-blue .tablesorter-filter-row.hideme *{height:1px;min-height:0;border:0;padding:0;margin:0;opacity:0;filter:alpha(opacity=0)}.tablesorter-blue input.tablesorter-filter,.tablesorter-blue select.tablesorter-filter{width:98%;height:auto;margin:0;padding:4px;background-color:#fff;border:1px solid #bbb;color:#333;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:height .1s ease;-moz-transition:height .1s ease;-o-transition:height .1s ease;transition:height .1s ease}.tablesorter .filtered{display:none}.tablesorter .tablesorter-errorRow td{text-align:center;cursor:pointer;background-color:#e6bf99} \ No newline at end of file diff --git a/static/images/network/amazon prime.png 
b/static/images/network/amazon prime.png
new file mode 100644
index 0000000000..ffa4f38294
Binary files /dev/null and b/static/images/network/amazon prime.png differ
diff --git a/static/images/network/bbc iplayer.png b/static/images/network/bbc iplayer.png
new file mode 100644
index 0000000000..a664090720
Binary files /dev/null and b/static/images/network/bbc iplayer.png differ
diff --git a/static/images/network/cbs all access.png b/static/images/network/cbs all access.png
new file mode 100644
index 0000000000..6a915230fa
Binary files /dev/null and b/static/images/network/cbs all access.png differ
diff --git a/static/images/network/club illico.png b/static/images/network/club illico.png
new file mode 100644
index 0000000000..d6d0e697b9
Binary files /dev/null and b/static/images/network/club illico.png differ
diff --git a/static/images/network/dr3.png b/static/images/network/dr3.png
new file mode 100644
index 0000000000..c39f663173
Binary files /dev/null and b/static/images/network/dr3.png differ
diff --git a/static/images/network/italia 1.png b/static/images/network/italia 1.png
new file mode 100644
index 0000000000..d477462171
Binary files /dev/null and b/static/images/network/italia 1.png differ
diff --git a/static/images/network/kanal 5 (dk).png b/static/images/network/kanal 5 (dk).png
new file mode 100644
index 0000000000..2a8e4cc4dd
Binary files /dev/null and b/static/images/network/kanal 5 (dk).png differ
diff --git a/static/images/network/la7.png b/static/images/network/la7.png
new file mode 100644
index 0000000000..20b69673b3
Binary files /dev/null and b/static/images/network/la7.png differ
diff --git a/static/images/network/rai gulp.png b/static/images/network/rai gulp.png
new file mode 100644
index 0000000000..ad6e304333
Binary files /dev/null and b/static/images/network/rai gulp.png differ
diff --git a/static/images/network/rete 4.png b/static/images/network/rete 4.png
new file mode 100644
index 0000000000..1b4a62bccf
Binary files /dev/null and b/static/images/network/rete 4.png differ
diff --git a/static/images/network/seeso.png b/static/images/network/seeso.png
new file mode 100644
index 0000000000..00026b3d75
Binary files /dev/null and b/static/images/network/seeso.png differ
diff --git a/static/images/network/sky 1.png b/static/images/network/sky 1.png
new file mode 100644
index 0000000000..209755e2b0
Binary files /dev/null and b/static/images/network/sky 1.png differ
diff --git a/static/images/network/sky uno.png b/static/images/network/sky uno.png
new file mode 100644
index 0000000000..3272536bea
Binary files /dev/null and b/static/images/network/sky uno.png differ
diff --git a/static/images/network/startrekcontinues.com.png b/static/images/network/startrekcontinues.com.png
new file mode 100644
index 0000000000..a4b71aafe2
Binary files /dev/null and b/static/images/network/startrekcontinues.com.png differ
diff --git a/static/images/network/tv 2 zulu.png b/static/images/network/tv 2 zulu.png
new file mode 100644
index 0000000000..d11495d683
Binary files /dev/null and b/static/images/network/tv 2 zulu.png differ
diff --git a/static/images/network/tv 3.png b/static/images/network/tv 3.png
new file mode 100644
index 0000000000..c53fe1add0
Binary files /dev/null and b/static/images/network/tv 3.png differ
diff --git a/static/images/network/yes.png b/static/images/network/yes.png
new file mode 100644
index 0000000000..7a57095776
Binary files /dev/null and b/static/images/network/yes.png differ
diff --git a/static/images/network/ytv (jp).png b/static/images/network/ytv (jp).png
new file mode 100644 index 0000000000..62409ea5c0 Binary files /dev/null and b/static/images/network/ytv (jp).png differ diff --git a/static/images/trakt.png b/static/images/trakt.png new file mode 100644 index 0000000000..ebdaf8ce67 Binary files /dev/null and b/static/images/trakt.png differ diff --git a/static/js/add-show-options.js b/static/js/add-show-options.js index 886756ec39..fb295eca13 100644 --- a/static/js/add-show-options.js +++ b/static/js/add-show-options.js @@ -9,10 +9,11 @@ $(document).ready(function() { bestQualArray.push($(d).val()); }); + // @TODO: Move this to API $.get('config/general/saveAddShowDefaults', { defaultStatus: $('#statusSelect').val(), - allowed_qualities: anyQualArray.join(','), - preferred_qualities: bestQualArray.join(','), + allowed_qualities: anyQualArray.join(','), // eslint-disable-line camelcase + preferred_qualities: bestQualArray.join(','), // eslint-disable-line camelcase defaultFlattenFolders: $('#flatten_folders').prop('checked'), subtitles: $('#subtitles').prop('checked'), anime: $('#anime').prop('checked'), diff --git a/static/js/add-shows/add-existing-show.js b/static/js/add-shows/add-existing-show.js index 6d538a1e4e..09b7c95757 100644 --- a/static/js/add-shows/add-existing-show.js +++ b/static/js/add-shows/add-existing-show.js @@ -1,8 +1,8 @@ MEDUSA.addShows.addExistingShow = function() { $('#tableDiv').on('click', '#checkAll', function() { - var seasCheck = this; + var seasonCheck = this; $('.dirCheck').each(function() { - this.checked = seasCheck.checked; + this.checked = seasonCheck.checked; }); }); @@ -10,11 +10,10 @@ MEDUSA.addShows.addExistingShow = function() { var dirArr = []; $('.dirCheck').each(function() { if (this.checked === true) { - var show = $(this).attr('id'); - var originalIndexer = $(this).attr('data-indexer') - var indexerId = '|' + $(this).attr('data-indexer-id') - var showName = $(this).attr('data-show-name') - var showDir = $(this).attr('data-show-dir') + var originalIndexer = $(this).attr('data-indexer'); + var indexerId = '|' + $(this).attr('data-indexer-id'); + var showName = $(this).attr('data-show-name'); + var showDir = $(this).attr('data-show-dir'); var indexer = $(this).closest('tr').find('select').val(); if (originalIndexer !== indexer || originalIndexer === '0') { diff --git a/static/js/add-shows/init.js b/static/js/add-shows/init.js index a0d47b80fc..e9a87738d2 100644 --- a/static/js/add-shows/init.js +++ b/static/js/add-shows/init.js @@ -4,8 +4,15 @@ MEDUSA.addShows.init = function() { selected: (MEDUSA.config.sortArticle ? 
-1 : 0) }); + var imgLazyLoad = new LazyLoad({ + // example of options object -> see options section + threshold: 500 + }); + $.initRemoteShowGrid = function() { // Set defaults on page load + imgLazyLoad.update(); + imgLazyLoad.handleScroll(); $('#showsort').val('original'); $('#showsortdirection').val('asc'); @@ -37,6 +44,10 @@ MEDUSA.addShows.init = function() { }); }); + $('#rootDirs').on('change', function() { + $.rootDirCheck(); + }); + $('#showsortdirection').on('change', function() { $('#container').isotope({ sortAscending: (this.value === 'asc') @@ -54,6 +65,9 @@ MEDUSA.addShows.init = function() { rating: '[data-rating] parseInt', votes: '[data-votes] parseInt' } + }).on('layoutComplete arrangeComplete removeComplete', function() { + imgLazyLoad.update(); + imgLazyLoad.handleScroll(); }); }; @@ -64,6 +78,8 @@ MEDUSA.addShows.init = function() { $(this).empty().html(errorTxt); } else { $.initRemoteShowGrid(); + imgLazyLoad.update(); + imgLazyLoad.handleScroll(); } }); }; @@ -145,8 +161,8 @@ MEDUSA.addShows.init = function() { $.get('config/general/saveAddShowDefaults', { defaultStatus: $('#statusSelect').val(), - allowed_qualities: anyQualArray.join(','), - preferred_qualities: bestQualArray.join(','), + allowed_qualities: anyQualArray.join(','), // eslint-disable-line camelcase + preferred_qualities: bestQualArray.join(','), // eslint-disable-line camelcase defaultFlattenFolders: $('#flatten_folders').prop('checked'), subtitles: $('#subtitles').prop('checked'), anime: $('#anime').prop('checked'), diff --git a/static/js/add-shows/new-show.js b/static/js/add-shows/new-show.js index a1d3b73d56..a73bddc908 100644 --- a/static/js/add-shows/new-show.js +++ b/static/js/add-shows/new-show.js @@ -89,7 +89,7 @@ MEDUSA.addShows.newShow = function() { dataType: 'json', error: function() { $('#searchResults').empty().html('search timed out, try again or try another indexer'); - }, + } }).done(function(data) { var firstResult = true; var resultStr = '
        \nSearch Results:\n'; @@ -109,7 +109,7 @@ MEDUSA.addShows.newShow = function() { var whichSeries = obj.join('|'); resultStr += ' '; - if (data.langid && data.langid !== '' && obj[1] === 1) { //For now only add the language id to the tvdb url, as the others might have different routes.) + if (data.langid && data.langid !== '' && obj[1] === 1) { // For now only add the language id to the tvdb url, as the others might have different routes. resultStr += '' + obj[4] + ''; } else { resultStr += '' + obj[4] + ''; @@ -206,4 +206,8 @@ MEDUSA.addShows.newShow = function() { updateSampleText(); myform.loadsection(2); }); + + $('#rootDirs').on('change', function() { + updateSampleText(); + }); }; diff --git a/static/js/add-shows/popular-shows.js b/static/js/add-shows/popular-shows.js index 41f6a37c7c..337f6884cb 100644 --- a/static/js/add-shows/popular-shows.js +++ b/static/js/add-shows/popular-shows.js @@ -1,3 +1,4 @@ MEDUSA.addShows.popularShows = function() { $.initRemoteShowGrid(); + $.rootDirCheck(); }; diff --git a/static/js/add-shows/recommended-shows.js b/static/js/add-shows/recommended-shows.js index 68d9a8e6d8..b17664d92a 100644 --- a/static/js/add-shows/recommended-shows.js +++ b/static/js/add-shows/recommended-shows.js @@ -10,4 +10,5 @@ MEDUSA.addShows.recommendedShows = function() { $.initAddShowById(); $.initBlackListShowById(); $.initRemoteShowGrid(); + $.rootDirCheck(); }; diff --git a/static/js/add-shows/trending-shows.js b/static/js/add-shows/trending-shows.js index 1f947be71e..8d19e50cb2 100644 --- a/static/js/add-shows/trending-shows.js +++ b/static/js/add-shows/trending-shows.js @@ -23,4 +23,5 @@ MEDUSA.addShows.trendingShows = function() { $.initAddShowById(); $.initBlackListShowById(); + $.rootDirCheck(); }; diff --git a/static/js/ajax-episode-search.js b/static/js/ajax-episode-search.js index 0e487046c5..40055b3fd1 100644 --- a/static/js/ajax-episode-search.js +++ b/static/js/ajax-episode-search.js @@ -167,6 +167,7 @@ $.ajaxEpSearch = function(options) { url += '&down_cur_quality=1'; } + // @TODO: Move to the API $.getJSON(url, function(data) { // if they failed then just put the red X if (data.result.toLowerCase() === 'failure') { diff --git a/static/js/ajax-notifications.js b/static/js/ajax-notifications.js index 40a1bbf84c..6e64a090a2 100644 --- a/static/js/ajax-notifications.js +++ b/static/js/ajax-notifications.js @@ -1,4 +1,4 @@ -var messageUrl = 'ui/get_messages'; // eslint-disable-line xo/filename-case +var messageUrl = 'ui/get_messages'; var test = !1; var iconUrl = 'images/ico/favicon-120.png'; @@ -19,10 +19,10 @@ function displayPNotify(type, title, message) { new PNotify({ // eslint-disable-line no-new type: type, title: title, - text: message.replace(/]*)?>/ig, '\n') - .replace(/<[\/]?b(?:\s[^>]*)?>/ig, '*') - .replace(/]*)?>/ig, '[').replace(/<[\/]i>/ig, ']') - .replace(/<(?:[\/]?ul|\/li)(?:\s[^>]*)?>/ig, '').replace(/]*)?>/ig, '\n* ') + text: message.replace(/]*)?>/ig, '\n') + .replace(/<[/]?b(?:\s[^>]*)?>/ig, '*') + .replace(/]*)?>/ig, '[').replace(/<[/]i>/ig, ']') + .replace(/<(?:[/]?ul|\/li)(?:\s[^>]*)?>/ig, '').replace(/]*)?>/ig, '\n* ') }); } diff --git a/static/js/common/init.js b/static/js/common/init.js index 9776674597..fddfb261c8 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -1,30 +1,119 @@ MEDUSA.common.init = function() { + // Import underscore.string using it's mixin export. 
+ _.mixin(s.exports()); + + // Background Fanart Functions if (MEDUSA.config.fanartBackground) { var showID = $('#showID').attr('value'); if (showID) { - let asset = 'show/' + $('#showID').attr('value') + '?type=fanart'; - let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; + let path = apiRoot + 'series/' + $('#series_slug').attr('value') + '/asset/fanart?api_key=' + apiKey; $.backstretch(path); - $('.backstretch').css('top',backstretchOffset()); + $('.backstretch').css('top', backstretchOffset()); $('.backstretch').css('opacity', MEDUSA.config.fanartBackgroundOpacity).fadeIn(500); } } function backstretchOffset() { var offset = '90px'; - if($("#sub-menu-container").length == 0) { + if ($('#sub-menu-container').length === 0) { offset = '50px'; } - if ($(window).width() < 1281) { + if ($(window).width() < 1280) { offset = '50px'; } return offset; } $(window).resize(function() { - $('.backstretch').css('top',backstretchOffset()); + $('.backstretch').css('top', backstretchOffset()); + }); + + // Scroll Functions + function scrollTo(dest) { + $('html, body').animate({scrollTop: $(dest).offset().top}, 500, 'linear'); + } + + $(document).on('scroll', function() { + if ($(window).scrollTop() > 100) { + $('.scroll-top-wrapper').addClass('show'); + } else { + $('.scroll-top-wrapper').removeClass('show'); + } }); + $('.scroll-top-wrapper').on('click', function() { + scrollTo($('body')); + }); + + // Scroll to Anchor + $('a[href^="#season"]').on('click', function(e) { + e.preventDefault(); + scrollTo($('a[name="' + $(this).attr('href').replace('#', '') + '"]')); + }); + + // Hover Dropdown for Nav + $('ul.nav li.dropdown').hover(function() { + $(this).find('.dropdown-menu').stop(true, true).delay(200).fadeIn(500); + }, function() { + $(this).find('.dropdown-menu').stop(true, true).delay(200).fadeOut(500); + }); + + // function to change luminance of #000000 color - used in triggerhighlighting + function colorLuminance(hex, lum) { + hex = String(hex).replace(/[^0-9a-f]/gi, ''); + if (hex.length < 6) { + hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2]; + } + lum = lum || 0; + var rgb = '#'; + var c; + var i; + for (i = 0; i < 3; i++) { + c = parseInt(hex.substr(i * 2, 2), 16); + c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16); + rgb += ('00' + c).substr(c.length); + } + return rgb; + } + + // function to convert rgb(0,0,0) into #000000 + function rgb2hex(rgb) { + rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/); + function hex(x) { + return ('0' + parseInt(x, 10).toString(16)).slice(-2); + } + return '#' + hex(rgb[1]) + hex(rgb[2]) + hex(rgb[3]); + } + + var revertBackgroundColor; // used to revert back to original background-color after highlight + $('.triggerhighlight').on('mouseover', function() { + revertBackgroundColor = rgb2hex($(this).parent().css('background-color')); // fetch the original background-color to revert back to + $(this).parent().find('.triggerhighlight').css('background-color', colorLuminance(revertBackgroundColor, -0.15)); // setting highlight background-color + }).on('mouseout', function() { + $(this).parent().find('.triggerhighlight').css('background-color', revertBackgroundColor); // reverting back to original background-color + }); + + $.rootDirCheck = function() { + if ($('#rootDirs option:selected').length === 0) { + $('button[data-add-show]').prop('disabled', true); + if (!$('#configure_show_options').is(':checked')) { + $('#configure_show_options').prop('checked', true); + $('#content_configure_show_options').fadeIn('fast', 
'linear'); + } + if ($('#rootDirAlert').length === 0) { + $('#content-row').before('
        ' + + '
        ' + + 'ERROR! Unable to add recommended shows. Please set a default directory first.' + + '
        '); + } else { + $('#rootDirAlert').show(); + } + } else { + $('#rootDirAlert').hide(); + $('button[data-add-show]').prop('disabled', false); + } + }; + $.confirm.options = { confirmButton: 'Yes', cancelButton: 'Cancel', @@ -182,7 +271,7 @@ MEDUSA.common.init = function() { }, position: { my: my, - at: at, + at: at }, style: { tip: { diff --git a/static/js/config-providers.js b/static/js/config-providers.js index df5d46cec7..8026bd9ac5 100644 --- a/static/js/config-providers.js +++ b/static/js/config-providers.js @@ -72,7 +72,8 @@ $(document).ready(function() { // eslint-disable-line max-lines var newData = [isDefault, [name, url, key, cat]]; newznabProviders[id] = newData; - $('#editANewznabProvider').addOption(id, name); + $('#editANewznabProvider').append(''); + $('select#editANewznabProvider').prop('selectedIndex', 0); if ($('#provider_order_list > #' + id).length === 0 && showProvider !== false) { var toAdd = '
      • ' + name + ' ' + name + '
      • '; // eslint-disable-line no-undef @@ -84,11 +85,11 @@ $(document).ready(function() { // eslint-disable-line max-lines $(this).makeNewznabProviderString(); }; - $.fn.addTorrentRssProvider = function(id, name, url, cookies, title_tag) { // eslint-disable-line max-params - var newData = [name, url, cookies, title_tag]; + $.fn.addTorrentRssProvider = function(id, name, url, cookies, titleTag) { // eslint-disable-line max-params + var newData = [name, url, cookies, titleTag]; torrentRssProviders[id] = newData; - $('#editATorrentRssProvider').addOption(id, name); + $('#editATorrentRssProvider').append(''); $(this).populateTorrentRssSection(); if ($('#provider_order_list > #' + id).length === 0) { @@ -117,10 +118,10 @@ $(document).ready(function() { // eslint-disable-line max-lines $(this).makeNewznabProviderString(); }; - $.fn.updateTorrentRssProvider = function(id, url, cookies, title_tag) { + $.fn.updateTorrentRssProvider = function(id, url, cookies, titleTag) { torrentRssProviders[id][1] = url; torrentRssProviders[id][2] = cookies; - torrentRssProviders[id][3] = title_tag; + torrentRssProviders[id][3] = titleTag; $(this).populateTorrentRssSection(); $(this).makeTorrentRssProviderString(); }; @@ -381,9 +382,9 @@ $(document).ready(function() { // eslint-disable-line max-lines var url = $('#torrentrss_url').val(); var cookies = $('#torrentrss_cookies').val(); - var title_tag = $('#torrentrss_title_tag').val(); + var titleTag = $('#torrentrss_title_tag').val(); - $(this).updateTorrentRssProvider(selectedProvider, url, cookies, title_tag); + $(this).updateTorrentRssProvider(selectedProvider, url, cookies, titleTag); }); $('body').on('change', '#editAProvider', function() { @@ -475,14 +476,15 @@ $(document).ready(function() { // eslint-disable-line max-lines var name = $('#torrentrss_name').val(); var url = $('#torrentrss_url').val(); var cookies = $('#torrentrss_cookies').val(); - var title_tag = $('#torrentrss_title_tag').val(); + var titleTag = $('#torrentrss_title_tag').val(); var params = { name: name, url: url, cookies: cookies, - title_tag: title_tag + title_tag: titleTag // eslint-disable-line camelcase }; + // @TODO: Move to the API // send to the form with ajax, get a return value $.getJSON('config/providers/canAddTorrentRssProvider', params, function(data) { if (data.error !== undefined) { @@ -490,7 +492,7 @@ $(document).ready(function() { // eslint-disable-line max-lines return; } - $(this).addTorrentRssProvider(data.success, name, url, cookies, title_tag); + $(this).addTorrentRssProvider(data.success, name, url, cookies, titleTag); $(this).refreshEditAProvider(); }); }); diff --git a/static/js/config/index.js b/static/js/config/index.js index 71ed48ac04..def6737036 100644 --- a/static/js/config/index.js +++ b/static/js/config/index.js @@ -4,6 +4,19 @@ MEDUSA.config.index = function() { $('label[for="proxy_indexers"]').hide(); } + $('#theme_name').on('change', function() { + api.patch('config/main', { + theme: { + name: $(this).val() + } + }).then(function(response) { + log.info(response); + window.location.reload(); + }).catch(function(err) { + log.error(err); + }); + }); + $('input[name="proxy_setting"]').on('input', function() { if ($(this).val().length === 0) { $('input[id="proxy_indexers"]').prop('checked', false); diff --git a/static/js/config/init.js b/static/js/config/init.js index ff84462ddf..cedf5c2b25 100644 --- a/static/js/config/init.js +++ b/static/js/config/init.js @@ -112,4 +112,40 @@ MEDUSA.config.init = function() { $('#branchForceUpdate').prop('disabled', 
false); }); }); + + // GitHub Auth Types + function setupGithubAuthTypes() { + var selected = parseInt($('input[name="git_auth_type"]').filter(':checked').val(), 10); + + $('div[name="content_github_auth_type"]').each(function(index) { + if (index === selected) { + $(this).show(); + } else { + $(this).hide(); + } + }); + } + // GitHub Auth Types + setupGithubAuthTypes(); + + $('input[name="git_auth_type"]').on('click', function() { + setupGithubAuthTypes(); + }); + + $('#git_token').on('click', function() { + $('#git_token').select(); + }); + + $('#create_access_token').popover({ + placement: 'left', + html: true, // required if content has HTML + title: 'Github Token', + content: '

        Copy the generated token and paste it in the token input box.
        ' + }); + + $('#manage_tokens').on('click', function() { + window.open(MEDUSA.config.anonRedirect + 'https://github.com/settings/tokens', '_blank'); + }); }; diff --git a/static/js/config/search.js b/static/js/config/search.js index 399b253912..e908ec9c32 100644 --- a/static/js/config/search.js +++ b/static/js/config/search.js @@ -3,6 +3,7 @@ MEDUSA.config.search = function() { $('#nzb_dir').fileBrowser({title: 'Select .nzb black hole/watch location'}); $('#torrent_dir').fileBrowser({title: 'Select .torrent black hole/watch location'}); $('#torrent_path').fileBrowser({title: 'Select .torrent download location'}); + $('#torrent_seed_location').fileBrowser({title: 'Select Post-Processed seeding torrents location'}); $.fn.nzbMethodHandler = function() { var selectedProvider = $('#nzb_method :selected').val(); @@ -67,6 +68,7 @@ MEDUSA.config.search = function() { $('#torrent_auth_type_option').hide(); $('#torrent_path_option').show(); $('#torrent_path_option').find('.fileBrowser').show(); + $('#torrent_seed_location_option').hide(); $('#torrent_seed_time_option').hide(); $('#torrent_high_bandwidth_option').hide(); $('#torrent_label_option').show(); @@ -81,6 +83,7 @@ MEDUSA.config.search = function() { $('#torrent_seed_time_label').text('Minimum seeding time is'); $('#torrent_seed_time_option').show(); $('#host_desc_torrent').text('URL to your uTorrent client (e.g. http://localhost:8000)'); + $('#torrent_seed_location_option').hide(); } else if (selectedProvider.toLowerCase() === 'transmission') { client = 'Transmission'; $('#torrent_seed_time_label').text('Stop seeding when inactive for'); @@ -90,6 +93,7 @@ MEDUSA.config.search = function() { $('#torrent_label_anime_option').hide(); $('#torrent_rpcurl_option').show(); $('#host_desc_torrent').text('URL to your Transmission client (e.g. http://localhost:9091)'); + $('#torrent_seed_location_option').show(); } else if (selectedProvider.toLowerCase() === 'deluge') { client = 'Deluge'; $('#torrent_verify_cert_option').show(); @@ -100,6 +104,7 @@ MEDUSA.config.search = function() { $('#torrent_username_option').hide(); $('#torrent_username').prop('value', ''); $('#host_desc_torrent').text('URL to your Deluge client (e.g. http://localhost:8112)'); + $('#torrent_seed_location_option').show(); } else if (selectedProvider.toLowerCase() === 'deluged') { client = 'Deluge'; $('#torrent_verify_cert_option').hide(); @@ -109,6 +114,7 @@ MEDUSA.config.search = function() { $('#label_anime_warning_deluge').show(); $('#torrent_username_option').show(); $('#host_desc_torrent').text('IP or Hostname of your Deluge Daemon (e.g. scgi://localhost:58846)'); + $('#torrent_seed_location_option').show(); } else if (selectedProvider.toLowerCase() === 'download_station') { client = 'Synology DS'; $('#torrent_label_option').hide(); @@ -117,6 +123,7 @@ MEDUSA.config.search = function() { $('#torrent_path_option').find('.fileBrowser').hide(); $('#host_desc_torrent').text('URL to your Synology DS client (e.g. 
http://localhost:5000)'); $('#path_synology').show(); + $('#torrent_seed_location_option').hide(); } else if (selectedProvider.toLowerCase() === 'rtorrent') { client = 'rTorrent'; $('#torrent_paused_option').hide(); @@ -125,12 +132,14 @@ MEDUSA.config.search = function() { $('#torrent_verify_deluge').hide(); $('#torrent_verify_rtorrent').show(); $('#torrent_auth_type_option').show(); + $('#torrent_seed_location_option').hide(); } else if (selectedProvider.toLowerCase() === 'qbittorrent') { client = 'qbittorrent'; $('#torrent_path_option').hide(); $('#label_warning_qbittorrent').show(); $('#label_anime_warning_qbittorrent').show(); $('#host_desc_torrent').text('URL to your qbittorrent client (e.g. http://localhost:8080)'); + $('#torrent_seed_location_option').hide(); } else if (selectedProvider.toLowerCase() === 'mlnet') { client = 'mlnet'; $('#torrent_path_option').hide(); @@ -141,11 +150,12 @@ MEDUSA.config.search = function() { $('#torrent_label_anime_option').hide(); $('#torrent_paused_option').hide(); $('#host_desc_torrent').text('URL to your MLDonkey (e.g. http://localhost:4080)'); + $('#torrent_seed_location_option').hide(); } $('#host_title').text(client + host); $('#username_title').text(client + username); $('#password_title').text(client + password); - $('#torrent_client').text(client); + $('#torrent_client, #torrent_client_seed_path').text(client); $('#rpcurl_title').text(client + rpcurl); optionPanel = '#options_torrent_clients'; } diff --git a/static/js/core.js b/static/js/core.js index 0793f3eba0..30d67c8a13 100644 --- a/static/js/core.js +++ b/static/js/core.js @@ -33,6 +33,21 @@ var UTIL = { } var body = document.body; + $('[asset]').each(function() { + let asset = $(this).attr('asset'); + let series = $(this).attr('series'); + let path = apiRoot + 'series/' + series + '/asset/' + asset + '?api_key=' + apiKey; + if (this.tagName.toLowerCase() === 'img') { + if ($(this).attr('lazy') === 'on') { + $(this).attr('data-original', path); + } else { + $(this).attr('src', path); + } + } + if (this.tagName.toLowerCase() === 'a') { + $(this).attr('href', path); + } + }); var controller = body.getAttribute('data-controller'); var action = body.getAttribute('data-action'); @@ -46,16 +61,15 @@ $.extend({ isMeta: function(pyVar, result) { // eslint-disable-line no-unused-vars var reg = new RegExp(result.length > 1 ? result.join('|') : result); - if (typeof(pyVar) === 'object' && Object.keys(pyVar).length == 1) { + if (typeof (pyVar) === 'object' && Object.keys(pyVar).length === 1) { return (reg).test(MEDUSA.config[Object.keys(pyVar)[0]][pyVar[Object.keys(pyVar)[0]]]); - } else { - if (pyVar.match('medusa')) { - pyVar.split('.')[1].toLowerCase().replace(/(_\w)/g, function(m) { - return m[1].toUpperCase(); - }); - } - return (reg).test(MEDUSA.config[pyVar]); } + if (pyVar.match('medusa')) { + pyVar.split('.')[1].toLowerCase().replace(/(_\w)/g, function(m) { + return m[1].toUpperCase(); + }); + } + return (reg).test(MEDUSA.config[pyVar]); } }); @@ -69,27 +83,17 @@ $.fn.extend({ }); if (!document.location.pathname.endsWith('/login/')) { - api.get('config').then(function(response) { + api.get('config/main').then(function(response) { log.setDefaultLevel('trace'); $.extend(MEDUSA.config, response.data); MEDUSA.config.themeSpinner = MEDUSA.config.themeName === 'dark' ? 
'-dark' : ''; MEDUSA.config.loading = ''; - $('[asset]').each(function(){ - let asset = $(this).attr('asset'); - let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; - if (this.tagName.toLowerCase() === 'img') { - $(this).attr('src', path); - } - if (this.tagName.toLowerCase() === 'a') { - $(this).attr('href', path); - } - }); - if (navigator.userAgent.indexOf('PhantomJS') === -1) { $(document).ready(UTIL.init); } - }).catch(function (error) { + }).catch(function(err) { + log.error(err); alert('Unable to connect to Medusa!'); // eslint-disable-line no-alert }); } diff --git a/static/js/errorlogs/viewlogs.js b/static/js/errorlogs/viewlogs.js index 9c23117bae..3d0ffccc0c 100644 --- a/static/js/errorlogs/viewlogs.js +++ b/static/js/errorlogs/viewlogs.js @@ -1,6 +1,6 @@ MEDUSA.errorlogs.viewlogs = function() { - var getParam = function() { - return params = $.param({ + function getParams() { + return $.param({ min_level: $('select[name=min_level]').val(), // eslint-disable-line camelcase log_filter: $('select[name=log_filter]').val(), // eslint-disable-line camelcase log_period: $('select[name=log_period]').val(), // eslint-disable-line camelcase @@ -8,13 +8,14 @@ MEDUSA.errorlogs.viewlogs = function() { }); } - $('#min_level,#log_filter,#log_search,#log_period').on('keyup change', _.debounce(function() { // eslint-disable-line no-undef + $('#min_level,#log_filter,#log_search,#log_period').on('keyup change', _.debounce(function() { + var params = getParams(); $('#min_level').prop('disabled', true); $('#log_filter').prop('disabled', true); $('#log_period').prop('disabled', true); document.body.style.cursor = 'wait'; - $.get('errorlogs/viewlog/?' + getParam(), function(data) { + $.get('errorlogs/viewlog/?' + params, function(data) { history.pushState('data', '', 'errorlogs/viewlog/?' + params); $('pre').html($(data).find('pre').html()); $('#min_level').prop('disabled', false); @@ -25,8 +26,9 @@ MEDUSA.errorlogs.viewlogs = function() { }, 500)); $(document.body).on('click', '#viewlog-text-view', function(e) { - e.preventDefault(); - var win = window.open('errorlogs/viewlog/?' + getParam() + '&text_view=1', '_blank'); - win.focus(); - }) + e.preventDefault(); + var params = getParams(); + var win = window.open('errorlogs/viewlog/?' + params + '&text_view=1', '_blank'); + win.focus(); + }); }; diff --git a/static/js/history/index.js b/static/js/history/index.js index 6d35d241b7..28cda51d36 100644 --- a/static/js/history/index.js +++ b/static/js/history/index.js @@ -3,23 +3,35 @@ MEDUSA.history.index = function() { widgets: ['saveSort', 'zebra', 'filter'], sortList: [[0, 1]], textExtraction: (function() { - if ($.isMeta({'layout': 'history'}, ['detailed'])) { + if ($.isMeta({layout: 'history'}, ['detailed'])) { return { // 0: Time 1: Episode 2: Action 3: Provider 4: Quality - 0: function(node) { return $(node).find('time').attr('datetime'); }, // Time - 1: function(node) { return $(node).find('a').text(); } // Episode + 0: function(node) { + return $(node).find('time').attr('datetime'); + }, + 1: function(node) { + return $(node).find('a').text(); + } }; } return { // 0: Time 1: Episode 2: Snatched 3: Downloaded 4: Quality - 0: function(node) { return $(node).find('time').attr('datetime'); }, // Time - 1: function(node) { return $(node).find('a').text(); }, // Episode - 2: function(node) { return $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'); }, // Snatched - 3: function(node) { return $(node).find('img').attr('title') === undefined ? 
'' : $(node).find('img').attr('title'); } // Downloaded + 0: function(node) { + return $(node).find('time').attr('datetime'); + }, + 1: function(node) { + return $(node).find('a').text(); + }, // Episode + 2: function(node) { + return $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'); + }, + 3: function(node) { + return $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'); + } }; })(), headers: (function() { - if ($.isMeta({'layout': 'history'}, ['detailed'])) { + if ($.isMeta({layout: 'history'}, ['detailed'])) { return { 0: {sorter: 'realISODate'} }; @@ -35,16 +47,16 @@ MEDUSA.history.index = function() { window.location.href = $('base').attr('href') + 'history/?limit=' + $(this).val(); }); - $('.show-option select[name="layout"]').on('change', function(){ - api.patch('config', { + $('.show-option select[name="layout"]').on('change', function() { + api.patch('config/main', { layout: { history: $(this).val() } }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); }); }); }; diff --git a/static/js/home/display-show.js b/static/js/home/display-show.js index 4ce0e9d2b9..b0abf10fb7 100644 --- a/static/js/home/display-show.js +++ b/static/js/home/display-show.js @@ -1,10 +1,10 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines $('.imdbPlot').on('click', function() { $(this).prev('span').toggle(); - if ($(this).html() === "..show less") { - $(this).html("..show more"); + if ($(this).html() === '..show less') { + $(this).html('..show more'); } else { - $(this).html("..show less"); + $(this).html('..show less'); } moveSummaryBackground(); movecheckboxControlsBackground(); @@ -12,17 +12,17 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines // adjust the summary background position and size on page load and resize function moveSummaryBackground() { - var height = $("#summary").height() + 10; - var top = $("#summary").offset().top + 5; - $("#summaryBackground").height(height); - $("#summaryBackground").offset({ top: top, left: 0}); + var height = $('#summary').height() + 10; + var top = $('#summary').offset().top + 5; + $('#summaryBackground').height(height); + $('#summaryBackground').offset({top: top, left: 0}); } function movecheckboxControlsBackground() { - var height = $("#checkboxControls").height() + 10; - var top = $("#checkboxControls").offset().top - 3; - $("#checkboxControlsBackground").height(height); - $("#checkboxControlsBackground").offset({ top: top, left: 0}); + var height = $('#checkboxControls').height() + 10; + var top = $('#checkboxControls').offset().top - 3; + $('#checkboxControlsBackground').height(height); + $('#checkboxControlsBackground').offset({top: top, left: 0}); } $(window).resize(function() { @@ -414,18 +414,18 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines }); // href="home/toggleDisplayShowSpecials/?show=${show.indexerid}" - $('.display-specials a').on('click', function(){ - api.patch('config', { + $('.display-specials a').on('click', function() { + api.patch('config/main', { layout: { show: { - specials: $(this).text() === 'Hide' ? 
false : true + specials: $(this).text() !== 'Hide' } } }).then(function(response) { log.info(response.data); window.location.reload(); - }).catch(function(response){ - log.error(response.data); + }).catch(function(err) { + log.error(err.data); }); }); }; diff --git a/static/js/home/edit-show.js b/static/js/home/edit-show.js index 63b34078f6..0897a64092 100644 --- a/static/js/home/edit-show.js +++ b/static/js/home/edit-show.js @@ -1,7 +1,6 @@ MEDUSA.home.editShow = function() { if (MEDUSA.config.fanartBackground) { - let asset = 'show/' + $('#showID').attr('value') + '?type=fanart'; - let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; + let path = apiRoot + 'series/' + $('#showID').attr('value') + '/asset/fanart?api_key=' + apiKey; $.backstretch(path); $('.backstretch').css('opacity', MEDUSA.config.fanartBackgroundOpacity).fadeIn(500); } diff --git a/static/js/home/index.js b/static/js/home/index.js index 066833ad1d..ee9c502f05 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -14,6 +14,11 @@ MEDUSA.home.index = function() { }); }, 500)); + var imgLazyLoad = new LazyLoad({ + // example of options object -> see options section + threshold: 500 + }); + function resizePosters(newSize) { var fontSize; var logoWidth; @@ -93,7 +98,7 @@ MEDUSA.home.index = function() { }); $('#showListTableShows:has(tbody tr), #showListTableAnime:has(tbody tr)').tablesorter({ - debug: true, + debug: false, sortList: [[7, 1], [2, 0]], textExtraction: (function() { return { @@ -183,6 +188,10 @@ MEDUSA.home.index = function() { }, sortStable: true, sortAppend: [[2, 0]] + }).bind('sortEnd', function() { + imgLazyLoad.handleScroll(); + }).bind('filterEnd', function() { + imgLazyLoad.handleScroll(); }); $('.show-grid').imagesLoaded(function() { @@ -217,6 +226,9 @@ MEDUSA.home.index = function() { return (indexer.length && parseInt(indexer, 10)) || Number.NEGATIVE_INFINITY; } } + }).on('layoutComplete arrangeComplete removeComplete', function() { + imgLazyLoad.update(); + imgLazyLoad.handleScroll(); }); // When posters are small enough to not display the .show-details @@ -245,7 +257,7 @@ MEDUSA.home.index = function() { popup.on('mouseleave', function() { $(this).remove(); }); - popup.css({zIndex: '9999'}) + popup.css({zIndex: '9999'}); popup.appendTo('body'); var height = 438; @@ -284,6 +296,8 @@ MEDUSA.home.index = function() { clearTimeout(posterHoverTimer); } }); + imgLazyLoad.update(); + imgLazyLoad.handleScroll(); }); $('#postersort').on('change', function() { @@ -308,16 +322,48 @@ MEDUSA.home.index = function() { } }); - $('.show-option select').on('change', function(){ - api.patch('config', { + $('.show-option .show-layout').on('change', function() { + api.patch('config/main', { layout: { home: $(this).val() } }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); + }); + }); + + $('#showRootDir').on('change', function() { + api.patch('config/main', { + selectedRootIndex: $(this).val() + }).then(function(response) { + log.info(response); + window.location.reload(); + }).catch(function(err) { + log.info(err); }); }); + + var rootDir = MEDUSA.config.rootDirs; + var rootDirIndex = MEDUSA.config.selectedRootIndex; + if (rootDir) { + var backendPieces = rootDir.split('|'); + var backendDirs = backendPieces.slice(1); + if (backendDirs.length >= 2) { + $('#showRoot').show(); + var item = ['All Folders']; + var rootDirOptions = item.concat(backendDirs); + $.each(rootDirOptions, 
function(i, item) { + $('#showRootDir').append($('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapCountries:function(){var a,b,c,d,e,f;d=this.options.country,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getCountries(),c.html(""),this.options.blank===!0&&c.append('
      • ');for(e in f)f.hasOwnProperty(e)&&(this.options.flags===!0?c.append('
      • '+f[e]+"
      • "):c.append('
      • '+f[e]+"
      • "));this.$element.val(d)},displayCountry:function(){var a;a=this.options.country,this.options.flags===!0?this.$element.html(' '+BFHCountriesList[a]):this.$element.html(BFHCountriesList[a])}};var c=a.fn.bfhcountries;a.fn.bfhcountries=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhcountries"),f="object"==typeof c&&c,e||d.data("bfhcountries",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhcountries.Constructor=b,a.fn.bfhcountries.defaults={country:"",available:"",flags:!1,blank:!0},a.fn.bfhcountries.noConflict=function(){return a.fn.bfhcountries=c,this},a(document).ready(function(){a("form select.bfh-countries, span.bfh-countries, div.bfh-countries").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhcountries(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhcurrencies.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addCurrencies(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapCurrencies(),this.$element.is("span")&&this.displayCurrency()};b.prototype={constructor:b,getCurrencies:function(){var b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHCurrenciesList)BFHCurrenciesList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHCurrenciesList[b]);return c}return BFHCurrenciesList},addCurrencies:function(){var a,b,c;a=this.options.currency,c=this.getCurrencies(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapCurrencies:function(){var a,b,c,d,e,f,g;d=this.options.currency,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getCurrencies(),c.html(""),this.options.blank===!0&&c.append('
      • ');for(e in f)f.hasOwnProperty(e)&&(this.options.flags===!0?(g=f[e].currencyflag?f[e].currencyflag:e.substr(0,2),c.append('
      • '+f[e].label+"
      • ")):c.append('
      • '+f[e].label+"
      • "));this.$element.val(d)},displayCurrency:function(){var a,b;a=this.options.currency,this.options.flags===!0?(b=BFHCurrenciesList[a].currencyflag?BFHCurrenciesList[a].currencyflag:a.substr(0,2),this.$element.html(' '+BFHCurrenciesList[a].label)):this.$element.html(BFHCurrenciesList[a].label)}};var c=a.fn.bfhcurrencies;a.fn.bfhcurrencies=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhcurrencies"),f="object"==typeof c&&c,e||d.data("bfhcurrencies",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhcurrencies.Constructor=b,a.fn.bfhcurrencies.defaults={currency:"",available:"",flags:!1,blank:!0},a.fn.bfhcurrencies.noConflict=function(){return a.fn.bfhcurrencies=c,this},a(document).ready(function(){a("form select.bfh-currencies, span.bfh-currencies, div.bfh-currencies").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhcurrencies(b.data())})})}(window.jQuery),+function(a){"use strict";function b(a,b){return new Date(b,a,0).getDate()}function c(a,b,c){return new Date(b,a,c).getDay()}function d(a,b,c,d){return b+=1,b=String(b),d=String(d),1===b.length&&(b="0"+b),1===d.length&&(d="0"+d),a.replace("m",b).replace("y",c).replace("d",d)}function e(a,b,c){var d,e,f;d=[{part:"m",position:a.indexOf("m")},{part:"y",position:a.indexOf("y")},{part:"d",position:a.indexOf("d")}],d.sort(function(a,b){return a.position-b.position}),f=b.match(/(\d+)/g);for(e in d)if(d.hasOwnProperty(e)&&d[e].part===c)return Number(f[e]).toString()}function f(){var b;a(h).each(function(c){return b=g(a(this)),b.hasClass("open")?(b.trigger(c=a.Event("hide.bfhdatepicker")),c.isDefaultPrevented()?!0:(b.removeClass("open").trigger("hidden.bfhdatepicker"),void 0)):!0})}function g(a){return a.closest(".bfh-datepicker")}var h="[data-toggle=bfh-datepicker]",i=function(b,c){this.options=a.extend({},a.fn.bfhdatepicker.defaults,c),this.$element=a(b),this.initCalendar()};i.prototype={constructor:i,setDate:function(){var a,b,c;a=this.options.date,c=this.options.format,""===a||"today"===a||void 0===a?(b=new Date,"today"===a&&this.$element.val(d(c,b.getMonth(),b.getFullYear(),b.getDate())),this.$element.data("month",b.getMonth()),this.$element.data("year",b.getFullYear())):(this.$element.val(a),this.$element.data("month",Number(e(c,a,"m")-1)),this.$element.data("year",Number(e(c,a,"y"))))},setDateLimit:function(a,b){var c,d;d=this.options.format,""!==a?(this.$element.data(b+"limit",!0),"today"===a?(c=new Date,this.$element.data(b+"day",c.getDate()),this.$element.data(b+"month",c.getMonth()),this.$element.data(b+"year",c.getFullYear())):(this.$element.data(b+"day",Number(e(d,a,"d"))),this.$element.data(b+"month",Number(e(d,a,"m")-1)),this.$element.data(b+"year",Number(e(d,a,"y"))))):this.$element.data(b+"limit",!1)},initCalendar:function(){var a,b,c;a="",b="",c="",""!==this.options.icon&&("right"===this.options.align?b='':a='',c="input-group"),this.$element.html('
        '+a+''+b+"
        "+'
        '+''+""+''+'"+'"+""+''+""+""+""+""+"
        '+''+""+''+"'+''+""+''+"
        "+"
        "),this.$element.on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",h,i.prototype.toggle).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .month > .previous",i.prototype.previousMonth).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .month > .next",i.prototype.nextMonth).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .year > .previous",i.prototype.previousYear).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .year > .next",i.prototype.nextYear).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar td:not(.off)",i.prototype.select).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar",function(){return!1}),this.setDate(),this.setDateLimit(this.options.min,"lower"),this.setDateLimit(this.options.max,"higher"),this.updateCalendar()},updateCalendarHeader:function(a,b,c){var d,e;for(a.find("table > thead > tr > th.month > span").text(BFHMonthsList[b]),a.find("table > thead > tr > th.year > span").text(c),d=a.find("table > thead > tr.days-header"),d.html(""),e=BFHDayOfWeekStart;e"+BFHDaysList[e]+"");for(e=0;BFHDayOfWeekStart>e;e+=1)d.append(""+BFHDaysList[e]+"")},checkMinDate:function(a,b,c){var d,e,f,g;return d=this.$element.data("lowerlimit"),d===!0&&(e=this.$element.data("lowerday"),f=this.$element.data("lowermonth"),g=this.$element.data("loweryear"),e>a&&b===f&&c===g||f>b&&c===g||g>c)?!0:!1},checkMaxDate:function(a,b,c){var d,e,f,g;return d=this.$element.data("higherlimit"),d===!0&&(e=this.$element.data("higherday"),f=this.$element.data("highermonth"),g=this.$element.data("higheryear"),a>e&&b===f&&c===g||b>f&&c===g||c>g)?!0:!1},checkToday:function(a,b,c){var d;return d=new Date,a===d.getDate()&&b===d.getMonth()&&c===d.getFullYear()?!0:!1},updateCalendarDays:function(a,d,e){var f,g,h,i,j,k,l;for(f=a.find("table > tbody").html(""),g=b(d,e),h=b(d+1,e),i=c(d,e,1),j=c(d,e,h),k="",l=0;(i-BFHDayOfWeekStart+7)%7>l;l+=1)k+=''+(g-(i-BFHDayOfWeekStart+7)%7+l+1)+"";for(l=1;h>=l;l+=1)k+=this.checkMinDate(l,d,e)?''+l+"":this.checkMaxDate(l,d,e)?''+l+"":this.checkToday(l,d,e)?''+l+"":''+l+"",c(d,e,l)===(6+BFHDayOfWeekStart)%7&&(f.append(""+k+""),k="");for(l=1;(7-(j+1-BFHDayOfWeekStart+7)%7)%7+1>=l;l+=1)k+=''+l+"",l===(7-(j+1-BFHDayOfWeekStart+7)%7)%7&&f.append(""+k+"")},updateCalendar:function(){var a,b,c;a=this.$element.find(".bfh-datepicker-calendar"),b=this.$element.data("month"),c=this.$element.data("year"),this.updateCalendarHeader(a,b,c),this.updateCalendarDays(a,b,c)},previousMonth:function(){var b,c,d;return b=a(this),c=g(b),0===Number(c.data("month"))?(c.data("month",11),c.data("year",Number(c.data("year"))-1)):c.data("month",Number(c.data("month"))-1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},nextMonth:function(){var b,c,d;return b=a(this),c=g(b),11===Number(c.data("month"))?(c.data("month",0),c.data("year",Number(c.data("year"))+1)):c.data("month",Number(c.data("month"))+1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},previousYear:function(){var b,c,d;return b=a(this),c=g(b),c.data("year",Number(c.data("year"))-1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},nextYear:function(){var b,c,d;return 
b=a(this),c=g(b),c.data("year",Number(c.data("year"))+1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},select:function(b){var c,e,h,i,j,k;c=a(this),b.preventDefault(),b.stopPropagation(),e=g(c),h=e.data("bfhdatepicker"),i=e.data("month"),j=e.data("year"),k=c.data("day"),e.val(d(h.options.format,i,j,k)),e.trigger("change.bfhdatepicker"),h.options.close===!0&&f()},toggle:function(b){var c,d,e;if(c=a(this),d=g(c),d.is(".disabled")||void 0!==d.attr("disabled"))return!0;if(e=d.hasClass("open"),f(),!e){if(d.trigger(b=a.Event("show.bfhdatepicker")),b.isDefaultPrevented())return!0;d.toggleClass("open").trigger("shown.bfhdatepicker"),c.focus()}return!1}};var j=a.fn.bfhdatepicker;a.fn.bfhdatepicker=function(b){return this.each(function(){var c,d,e;c=a(this),d=c.data("bfhdatepicker"),e="object"==typeof b&&b,this.type="bfhdatepicker",d||c.data("bfhdatepicker",d=new i(this,e)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhdatepicker.Constructor=i,a.fn.bfhdatepicker.defaults={icon:"glyphicon glyphicon-calendar",align:"left",input:"form-control",placeholder:"",name:"",date:"today",format:"m/d/y",min:"",max:"",close:!0},a.fn.bfhdatepicker.noConflict=function(){return a.fn.bfhdatepicker=j,this};var k;a.valHooks.div&&(k=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-datepicker")?a(b).find('input[type="text"]').val():k?k.get(b):void 0},set:function(b,c){if(a(b).hasClass("bfh-datepicker"))a(b).find('input[type="text"]').val(c);else if(k)return k.set(b,c)}},a(document).ready(function(){a("div.bfh-datepicker").each(function(){var b;b=a(this),b.bfhdatepicker(b.data())})}),a(document).on("click.bfhdatepicker.data-api",f)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhfonts.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addFonts(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapFonts()};b.prototype={constructor:b,getFonts:function(){var b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHFontsList)BFHFontsList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHFontsList[b]);return c}return BFHFontsList},addFonts:function(){var a,b,c;a=this.options.font,c=this.getFonts(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapFonts:function(){var a,b,c,d,e,f;d=this.options.font,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getFonts(),c.html(""),this.options.blank===!0&&c.append('
      • ');for(e in f)f.hasOwnProperty(e)&&c.append('
      • '+e+"
      • ");this.$element.val(d)}};var c=a.fn.bfhfonts;a.fn.bfhfonts=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhfonts"),f="object"==typeof c&&c,e||d.data("bfhfonts",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhfonts.Constructor=b,a.fn.bfhfonts.defaults={font:"",available:"",blank:!0},a.fn.bfhfonts.noConflict=function(){return a.fn.bfhfonts=c,this},a(document).ready(function(){a("form select.bfh-fonts, span.bfh-fonts, div.bfh-fonts").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhfonts(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhfontsizes.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addFontSizes(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapFontSizes()};b.prototype={constructor:b,getFontsizes:function(){var b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHFontSizesList)BFHFontSizesList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHFontSizesList[b]);return c}return BFHFontSizesList},addFontSizes:function(){var a,b,c;a=this.options.fontsize,c=this.getFontsizes(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapFontSizes:function(){var a,b,c,d,e,f;d=this.options.fontsize,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getFontsizes(),c.html(""),this.options.blank===!0&&c.append('
      • ');for(e in f)f.hasOwnProperty(e)&&c.append('
      • '+f[e]+"
      • ");this.$element.val(d)}};var c=a.fn.bfhfontsizes;a.fn.bfhfontsizes=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhfontsizes"),f="object"==typeof c&&c,e||d.data("bfhfontsizes",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhfontsizes.Constructor=b,a.fn.bfhfontsizes.defaults={fontsize:"",available:"",blank:!0},a.fn.bfhfontsizes.noConflict=function(){return a.fn.bfhfontsizes=c,this},a(document).ready(function(){a("form select.bfh-fontsizes, span.bfh-fontsizes, div.bfh-fontsizes").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhfontsizes(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhgooglefonts.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addFonts(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapFonts()};b.prototype={constructor:b,getFonts:function(){var b,c;if(c=[],this.options.subset)for(b in BFHGoogleFontsList.items)BFHGoogleFontsList.items.hasOwnProperty(b)&&a.inArray(this.options.subset,BFHGoogleFontsList.items[b].subsets)>=0&&(c[BFHGoogleFontsList.items[b].family]={info:BFHGoogleFontsList.items[b],index:parseInt(b,10)});else if(this.options.available){this.options.available=this.options.available.split(",");for(b in BFHGoogleFontsList.items)BFHGoogleFontsList.items.hasOwnProperty(b)&&a.inArray(BFHGoogleFontsList.items[b].family,this.options.available)>=0&&(c[BFHGoogleFontsList.items[b].family]={info:BFHGoogleFontsList.items[b],index:parseInt(b,10)})}else for(b in BFHGoogleFontsList.items)BFHGoogleFontsList.items.hasOwnProperty(b)&&(c[BFHGoogleFontsList.items[b].family]={info:BFHGoogleFontsList.items[b],index:parseInt(b,10)});return c},addFonts:function(){var a,b,c;a=this.options.font,c=this.getFonts(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapFonts:function(){var a,b,c,d,e,f;d=this.options.font,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getFonts(),c.html(""),this.options.blank===!0&&c.append('
      • ');for(e in f)f.hasOwnProperty(e)&&c.append('
      • '+f[e].info.family+"
      • ");this.$element.val(d)}};var c=a.fn.bfhgooglefonts;a.fn.bfhgooglefonts=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhgooglefonts"),f="object"==typeof c&&c,e||d.data("bfhgooglefonts",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhgooglefonts.Constructor=b,a.fn.bfhgooglefonts.defaults={font:"",available:"",subset:"",blank:!0},a.fn.bfhgooglefonts.noConflict=function(){return a.fn.bfhgooglefonts=c,this},a(document).ready(function(){a("form select.bfh-googlefonts, span.bfh-googlefonts, div.bfh-googlefonts").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhgooglefonts(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhlanguages.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addLanguages(),this.$element.is("span")&&this.displayLanguage(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapLanguages()};b.prototype={constructor:b,getLanguages:function(){var a,b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in this.options.available)this.options.available.hasOwnProperty(b)&&(-1!==this.options.available[b].indexOf("_")?(a=this.options.available[b].split("_"),c[a[0]]={name:BFHLanguagesList[a[0]],country:a[1]}):c[this.options.available[b]]=BFHLanguagesList[this.options.available[b]]);return c}return BFHLanguagesList},addLanguages:function(){var a,b,c;a=this.options.language,b=this.getLanguages(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(c in b)b.hasOwnProperty(c)&&(b[c].hasOwnProperty("name")?this.$element.append('"):this.$element.append('"));this.$element.val(a)},addBootstrapLanguages:function(){var a,b,c,d,e,f;d=this.options.language,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),e=this.getLanguages(),c.html(""),this.options.blank===!0&&c.append('
      • ');for(f in e)e.hasOwnProperty(f)&&(e[f].hasOwnProperty("name")?this.options.flags===!0?c.append('
      • '+e[f].name.toProperCase()+"
      • "):c.append('
      • '+e[f].name.toProperCase()+" ("+BFHCountriesList[e[f].country]+")
      • "):c.append('
      • '+e[f]+"
      • "));this.$element.val(d)},displayLanguage:function(){var a;a=this.options.language,-1!==a.indexOf("_")?(a=a.split("_"),this.options.flags===!0?this.$element.html(' '+BFHLanguagesList[a[0]].toProperCase()):this.$element.html(BFHLanguagesList[a[0]].toProperCase()+" ("+BFHCountriesList[a[1]]+")")):this.$element.html(BFHLanguagesList[a].toProperCase())}};var c=a.fn.bfhlanguages;a.fn.bfhlanguages=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhlanguages"),f="object"==typeof c&&c,e||d.data("bfhlanguages",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhlanguages.Constructor=b,a.fn.bfhlanguages.defaults={language:"",available:"",flags:!1,blank:!0},a.fn.bfhlanguages.noConflict=function(){return a.fn.bfhlanguages=c,this},a(document).ready(function(){a("form select.bfh-languages, span.bfh-languages, div.bfh-languages").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhlanguages(b.data())})}),String.prototype.toProperCase=function(){return this.replace(/\w\S*/g,function(a){return a.charAt(0).toUpperCase()+a.substr(1).toLowerCase()})}}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhnumber.defaults,c),this.$element=a(b),this.initInput()};b.prototype={constructor:b,initInput:function(){this.options.buttons===!0&&(this.$element.wrap('
        '),this.$element.parent().append(''),this.$element.parent().append('')),this.$element.on("change.bfhnumber.data-api",b.prototype.change),this.options.keyboard===!0&&this.$element.on("keydown.bfhnumber.data-api",b.prototype.keydown),this.options.buttons===!0&&this.$element.parent().on("mousedown.bfhnumber.data-api",".inc",b.prototype.btninc).on("mousedown.bfhnumber.data-api",".dec",b.prototype.btndec),this.formatNumber()},keydown:function(b){var c;if(c=a(this).data("bfhnumber"),c.$element.is(".disabled")||void 0!==c.$element.attr("disabled"))return!0;switch(b.which){case 38:c.increment();break;case 40:c.decrement()}return!0},mouseup:function(a){var b,c,d;b=a.data.btn,c=b.$element.data("timer"),d=b.$element.data("interval"),clearTimeout(c),clearInterval(d)},btninc:function(){var c,d;return c=a(this).parent().find(".bfh-number").data("bfhnumber"),c.$element.is(".disabled")||void 0!==c.$element.attr("disabled")?!0:(c.increment(),d=setTimeout(function(){var a;a=setInterval(function(){c.increment()},80),c.$element.data("interval",a)},750),c.$element.data("timer",d),a(document).one("mouseup",{btn:c},b.prototype.mouseup),!0)},btndec:function(){var c,d;return c=a(this).parent().find(".bfh-number").data("bfhnumber"),c.$element.is(".disabled")||void 0!==c.$element.attr("disabled")?!0:(c.decrement(),d=setTimeout(function(){var a;a=setInterval(function(){c.decrement()},80),c.$element.data("interval",a)},750),c.$element.data("timer",d),a(document).one("mouseup",{btn:c},b.prototype.mouseup),!0)},change:function(){var b;return b=a(this).data("bfhnumber"),b.$element.is(".disabled")||void 0!==b.$element.attr("disabled")?!0:(b.formatNumber(),!0)},increment:function(){var a;a=this.getValue(),a+=1,this.$element.val(a).change()},decrement:function(){var a;a=this.getValue(),a-=1,this.$element.val(a).change()},getValue:function(){var a;return a=this.$element.val(),"-1"!==a&&(a=String(a).replace(/\D/g,"")),0===String(a).length&&(a=this.options.min),parseInt(a)},formatNumber:function(){var a,b,c,d;if(a=this.getValue(),a>this.options.max&&(a=this.options.wrap===!0?this.options.min:this.options.max),ad;d+=1)a="0"+a;a!==this.$element.val()&&this.$element.val(a)}};var c=a.fn.bfhnumber;a.fn.bfhnumber=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhnumber"),f="object"==typeof c&&c,e||d.data("bfhnumber",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhnumber.Constructor=b,a.fn.bfhnumber.defaults={min:0,max:9999,zeros:!1,keyboard:!0,buttons:!0,wrap:!1},a.fn.bfhnumber.noConflict=function(){return a.fn.bfhnumber=c,this},a(document).ready(function(){a('form input[type="text"].bfh-number, form input[type="number"].bfh-number').each(function(){var b;b=a(this),b.bfhnumber(b.data())})})}(window.jQuery),+function(a){"use strict";function b(a,b){var c,d,e,f;for(c="",b=String(b).replace(/\D/g,""),d=0,e=0;d'+a(this).html()+""}),this.$element.html(''+''+''+''+""+'
        '+'
        '+'
          '+"
        "+"
        "+"
        "),this.$element.find("[role=option]").html(b),this.options.filter===!0&&this.$element.find(".bfh-selectbox-options").prepend('
        '),this.$element.val(this.options.value),this.$element.on("click.bfhselectbox.data-api touchstart.bfhselectbox.data-api",d,e.prototype.toggle).on("keydown.bfhselectbox.data-api",d+", [role=option]",e.prototype.keydown).on("mouseenter.bfhselectbox.data-api","[role=option] > li > a",e.prototype.mouseenter).on("click.bfhselectbox.data-api","[role=option] > li > a",e.prototype.select).on("click.bfhselectbox.data-api",".bfh-selectbox-filter",function(){return!1}).on("propertychange.bfhselectbox.data-api change.bfhselectbox.data-api input.bfhselectbox.data-api paste.bfhselectbox.data-api",".bfh-selectbox-filter",e.prototype.filter)},toggle:function(d){var e,f,g;if(e=a(this),f=c(e),f.is(".disabled")||void 0!==f.attr("disabled"))return!0;if(g=f.hasClass("open"),b(),!g){if(f.trigger(d=a.Event("show.bfhselectbox")),d.isDefaultPrevented())return!0;f.toggleClass("open").trigger("shown.bfhselectbox").find('[role=option] > li > [data-option="'+f.val()+'"]').focus()}return!1},filter:function(){var b,d,e;b=a(this),d=c(b),e=a("[role=option] li a",d),e.hide().filter(function(){return-1!==a(this).text().toUpperCase().indexOf(b.val().toUpperCase())}).show()},keydown:function(b){var f,g,h,i,j;return/(38|40|27)/.test(b.keyCode)?(f=a(this),b.preventDefault(),b.stopPropagation(),h=c(f),i=h.hasClass("open"),!i||i&&27===b.keyCode?(27===b.which&&h.find(d).focus(),f.click()):(g=a("[role=option] li:not(.divider) a:visible",h),g.length?(a("body").off("mouseenter.bfh-selectbox.data-api","[role=option] > li > a",e.prototype.mouseenter),j=g.index(g.filter(":focus")),38===b.keyCode&&j>0&&(j-=1),40===b.keyCode&&j li > a",e.prototype.mouseenter),void 0):!0)):!0},mouseenter:function(){var b;b=a(this),b.focus()},select:function(d){var e,f;return e=a(this),d.preventDefault(),d.stopPropagation(),e.is(".disabled")||void 0!==e.attr("disabled")?!0:(f=c(e),f.val(e.data("option")),f.trigger("change.bfhselectbox"),b(),void 0)}};var f=a.fn.bfhselectbox;a.fn.bfhselectbox=function(b){return this.each(function(){var c,d,f;c=a(this),d=c.data("bfhselectbox"),f="object"==typeof b&&b,this.type="bfhselectbox",d||c.data("bfhselectbox",d=new e(this,f)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhselectbox.Constructor=e,a.fn.bfhselectbox.defaults={icon:"caret",input:"form-control",name:"",value:"",filter:!1},a.fn.bfhselectbox.noConflict=function(){return a.fn.bfhselectbox=f,this};var g;a.valHooks.div&&(g=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-selectbox")?a(b).find('input[type="hidden"]').val():g?g.get(b):void 0},set:function(b,c){var d,e;if(a(b).hasClass("bfh-selectbox"))d=a(b),d.find("li a[data-option='"+c+"']").length>0?e=d.find("li a[data-option='"+c+"']").html():d.find("li a").length>0?e=d.find("li a").eq(0).html():(c="",e=""),d.find('input[type="hidden"]').val(c),d.find(".bfh-selectbox-option").html(e);else if(g)return g.set(b,c)}},a(document).ready(function(){a("div.bfh-selectbox").each(function(){var b;b=a(this),b.bfhselectbox(b.data())})}),a(document).on("click.bfhselectbox.data-api",b)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhslider.defaults,c),this.$element=a(b),this.initSlider()};b.prototype={constructor:b,initSlider:function(){""===this.options.value&&(this.options.value=this.options.min),this.$element.html(''+'
        '),this.$element.find('input[type="hidden"]').val(this.options.value),this.updateHandle(this.options.value),this.$element.on("mousedown.bfhslider.data-api",b.prototype.mouseDown)},updateHandle:function(a){var b,c,d,e;e=this.options.max-this.options.min,c=this.$element.width(),d=this.$element.position().left,b=Math.round((a-this.options.min)*(c-20)/e+d),this.$element.find(".bfh-slider-handle").css("left",b+"px"),this.$element.find(".bfh-slider-value").text(a)},updateVal:function(a){var b,c,d,e,f;return f=this.options.max-this.options.min,b=this.$element.width(),c=this.$element.offset().left,d=c+b,c>a&&(a=c),a+20>d&&(a=d),e=(a-c)/b,e=Math.ceil(e*f+this.options.min),e===this.$element.val()?!0:(this.$element.val(e),this.$element.trigger("change.bfhslider"),void 0)},mouseDown:function(){var c;return c=a(this),c.is(".disabled")||void 0!==c.attr("disabled")?!0:(a(document).on("mousemove.bfhslider.data-api",{slider:c},b.prototype.mouseMove).one("mouseup.bfhslider.data-api",{slider:c},b.prototype.mouseUp),void 0)},mouseMove:function(a){var b;b=a.data.slider,b.data("bfhslider").updateVal(a.pageX)},mouseUp:function(b){var c;c=b.data.slider,c.data("bfhslider").updateVal(b.pageX),a(document).off("mousemove.bfhslider.data-api")}};var c=a.fn.bfhslider;a.fn.bfhslider=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhslider"),f="object"==typeof c&&c,this.type="bfhslider",e||d.data("bfhslider",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhslider.Constructor=b,a.fn.bfhslider.defaults={name:"",value:"",min:0,max:100},a.fn.bfhslider.noConflict=function(){return a.fn.bfhslider=c,this};var d;a.valHooks.div&&(d=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-slider")?a(b).find('input[type="hidden"]').val():d?d.get(b):void 0},set:function(b,c){if(a(b).hasClass("bfh-slider"))a(b).find('input[type="hidden"]').val(c),a(b).data("bfhslider").updateHandle(c);else if(d)return d.set(b,c)}},a(document).ready(function(){a("div.bfh-slider").each(function(){var b;b=a(this),b.bfhslider(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhstates.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addStates(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapStates(),this.$element.is("span")&&this.displayState()};b.prototype={constructor:b,addStates:function(){var b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.val(),c.on("change",{state:this},this.changeCountry))),this.loadStates(b)},loadStates:function(a){var b,c;b=this.options.state,this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(c in BFHStatesList[a])BFHStatesList[a].hasOwnProperty(c)&&this.$element.append('");this.$element.val(b)},changeCountry:function(b){var c,d,e;c=a(this),d=b.data.state,e=c.val(),d.loadStates(e)},addBootstrapStates:function(){var b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.find('input[type="hidden"]').val(),c.on("change.bfhselectbox",{state:this},this.changeBootstrapCountry))),this.loadBootstrapStates(b)},loadBootstrapStates:function(a){var b,c,d,e,f,g;e=this.options.state,f="",b=this.$element.find('input[type="hidden"]'),c=this.$element.find(".bfh-selectbox-option"),d=this.$element.find("[role=option]"),d.html(""),this.options.blank===!0&&d.append('
        ');for(g in BFHStatesList[a])BFHStatesList[a].hasOwnProperty(g)&&(d.append('
        '+BFHStatesList[a][g].name+"
      • "),BFHStatesList[a][g].code===e&&(f=BFHStatesList[a][g].name));this.$element.val(e)},changeBootstrapCountry:function(b){var c,d,e;c=a(this),d=b.data.state,e=c.val(),d.loadBootstrapStates(e)},displayState:function(){var a,b,c,d;a=this.options.country,b=this.options.state,c="";for(d in BFHStatesList[a])if(BFHStatesList[a].hasOwnProperty(d)&&BFHStatesList[a][d].code===b){c=BFHStatesList[a][d].name;break}this.$element.html(c)}};var c=a.fn.bfhstates;a.fn.bfhstates=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhstates"),f="object"==typeof c&&c,e||d.data("bfhstates",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhstates.Constructor=b,a.fn.bfhstates.defaults={country:"",state:"",blank:!0},a.fn.bfhstates.noConflict=function(){return a.fn.bfhstates=c,this},a(document).ready(function(){a("form select.bfh-states, span.bfh-states, div.bfh-states").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhstates(b.data())})})}(window.jQuery),+function(a){"use strict";function b(a,b){return a=String(a),1===a.length&&(a="0"+a),b=String(b),1===b.length&&(b="0"+b),a+BFHTimePickerDelimiter+b}function c(){var b;a(e).each(function(c){return b=d(a(this)),b.hasClass("open")?(b.trigger(c=a.Event("hide.bfhtimepicker")),c.isDefaultPrevented()?!0:(b.removeClass("open").trigger("hidden.bfhtimepicker"),void 0)):!0})}function d(a){return a.closest(".bfh-timepicker")}var e="[data-toggle=bfh-timepicker]",f=function(b,c){this.options=a.extend({},a.fn.bfhtimepicker.defaults,c),this.$element=a(b),this.initPopover()};f.prototype={constructor:f,setTime:function(){var a,c,d,e,f,g,h;a=this.options.time,g="",h="",""===a||"now"===a||void 0===a?(c=new Date,e=c.getHours(),f=c.getMinutes(),"12h"===this.options.mode&&(e>12?(e-=12,g=" "+BFHTimePickerModes.pm,h="pm"):(g=" "+BFHTimePickerModes.am,h="am")),"now"===a&&this.$element.find('.bfh-timepicker-toggle > input[type="text"]').val(b(e,f)+g),this.$element.data("hour",e),this.$element.data("minute",f),this.$element.data("mode",h)):(d=String(a).split(BFHTimePickerDelimiter),e=d[0],f=d[1],"12h"===this.options.mode&&(d=String(f).split(" "),f=d[0],h=d[1]===BFHTimePickerModes.pm?"pm":"am"),this.$element.find('.bfh-timepicker-toggle > input[type="text"]').val(a),this.$element.data("hour",e),this.$element.data("minute",f),this.$element.data("mode",h))},initPopover:function(){var b,c,d,g,h;b="",c="",d="",""!==this.options.icon&&("right"===this.options.align?c='':b='',d="input-group"),g="",h="23","12h"===this.options.mode&&(g='
        '+'
        '+BFHTimePickerModes.am+"
        "+'
        '+BFHTimePickerModes.pm+"
        "+"
        ",h="11"),this.$element.html('
        '+b+''+c+"
        "+'
        '+''+""+""+'"+'"+'"+g+""+""+"
        '+''+"'+BFHTimePickerDelimiter+"'+''+"
        "+"
        "),this.$element.on("click.bfhtimepicker.data-api touchstart.bfhtimepicker.data-api",e,f.prototype.toggle).on("click.bfhtimepicker.data-api touchstart.bfhtimepicker.data-api",".bfh-timepicker-popover > table",function(){return!1}),this.$element.find(".bfh-number").each(function(){var b;b=a(this),b.bfhnumber(b.data()),b.on("change",f.prototype.change)}),this.$element.find(".bfh-selectbox").each(function(){var b;b=a(this),b.bfhselectbox(b.data()),b.on("change.bfhselectbox",f.prototype.change)}),this.setTime(),this.updatePopover()},updatePopover:function(){var a,b,c;a=this.$element.data("hour"),b=this.$element.data("minute"),c=this.$element.data("mode"),this.$element.find(".hour input[type=text]").val(a).change(),this.$element.find(".minute input[type=text]").val(b).change(),this.$element.find(".bfh-selectbox").val(c)},change:function(){var b,c,e,f;return b=a(this),c=d(b),e=c.data("bfhtimepicker"),e&&"undefined"!==e&&(f="","12h"===e.options.mode&&(f=" "+BFHTimePickerModes[c.find(".bfh-selectbox").val()]),c.find('.bfh-timepicker-toggle > input[type="text"]').val(c.find(".hour input[type=text]").val()+BFHTimePickerDelimiter+c.find(".minute input[type=text]").val()+f),c.trigger("change.bfhtimepicker")),!1},toggle:function(b){var e,f,g;if(e=a(this),f=d(e),f.is(".disabled")||void 0!==f.attr("disabled"))return!0;if(g=f.hasClass("open"),c(),!g){if(f.trigger(b=a.Event("show.bfhtimepicker")),b.isDefaultPrevented())return!0;f.toggleClass("open").trigger("shown.bfhtimepicker"),e.focus()}return!1}};var g=a.fn.bfhtimepicker;a.fn.bfhtimepicker=function(b){return this.each(function(){var c,d,e;c=a(this),d=c.data("bfhtimepicker"),e="object"==typeof b&&b,this.type="bfhtimepicker",d||c.data("bfhtimepicker",d=new f(this,e)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhtimepicker.Constructor=f,a.fn.bfhtimepicker.defaults={icon:"glyphicon glyphicon-time",align:"left",input:"form-control",placeholder:"",name:"",time:"now",mode:"24h"},a.fn.bfhtimepicker.noConflict=function(){return a.fn.bfhtimepicker=g,this};var h;a.valHooks.div&&(h=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-timepicker")?a(b).find('.bfh-timepicker-toggle > input[type="text"]').val():h?h.get(b):void 0},set:function(b,c){var d;if(a(b).hasClass("bfh-timepicker"))d=a(b).data("bfhtimepicker"),d.options.time=c,d.setTime(),d.updatePopover();else if(h)return h.set(b,c)}},a(document).ready(function(){a("div.bfh-timepicker").each(function(){var b;b=a(this),b.bfhtimepicker(b.data())})}),a(document).on("click.bfhtimepicker.data-api",c)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhtimezones.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addTimezones(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapTimezones()};b.prototype={constructor:b,addTimezones:function(){var b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.val(),c.on("change",{timezone:this},this.changeCountry))),this.loadTimezones(b)},loadTimezones:function(a){var b,c;b=this.options.timezone,this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(c in BFHTimezonesList[a])BFHTimezonesList[a].hasOwnProperty(c)&&this.$element.append('");this.$element.val(b)},changeCountry:function(b){var c,d,e;c=a(this),d=b.data.timezone,e=c.val(),d.loadTimezones(e)},addBootstrapTimezones:function(){var 
b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.find('input[type="hidden"]').val(),c.on("change.bfhselectbox",{timezone:this},this.changeBootstrapCountry))),this.loadBootstrapTimezones(b)},loadBootstrapTimezones:function(a){var b,c,d,e,f;e=this.options.timezone,b=this.$element.find('input[type="hidden"]'),c=this.$element.find(".bfh-selectbox-option"),d=this.$element.find("[role=option]"),d.html(""),this.options.blank===!0&&d.append('
        ');for(f in BFHTimezonesList[a])BFHTimezonesList[a].hasOwnProperty(f)&&d.append('
        '+BFHTimezonesList[a][f]+"
      • ");this.$element.val(e)},changeBootstrapCountry:function(b){var c,d,e;c=a(this),d=b.data.timezone,e=c.val(),d.loadBootstrapTimezones(e)}};var c=a.fn.bfhtimezones;a.fn.bfhtimezones=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhtimezones"),f="object"==typeof c&&c,e||d.data("bfhtimezones",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhtimezones.Constructor=b,a.fn.bfhtimezones.defaults={country:"",timezone:"",blank:!0},a.fn.bfhtimezones.noConflict=function(){return a.fn.bfhtimezones=c,this},a(document).ready(function(){a("form select.bfh-timezones, div.bfh-timezones").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhtimezones(b.data())})})}(window.jQuery); \ No newline at end of file diff --git a/static/js/lib/jquery.bookmarkscroll.js b/static/js/lib/jquery.bookmarkscroll.js deleted file mode 100644 index 23f01d766b..0000000000 --- a/static/js/lib/jquery.bookmarkscroll.js +++ /dev/null @@ -1,51 +0,0 @@ -//** Scrolling HTML Bookmarks script- (c) Dynamic Drive DHTML code library: http://www.dynamicdrive.com. -//** Available/ usage terms at http://www.dynamicdrive.com/ (April 11th, 09') -//** Updated Nov 10th, 09'- Fixed anchor jumping issue in IE7 - -var bookmarkscroll={ - setting: {duration:1000, yoffset:-50}, //{duration_of_scroll_milliseconds, offset_from_target_element_to_rest} - topkeyword: '#top', //keyword used in your anchors and scrollTo() to cause script to scroll page to very top - - scrollTo:function(dest, options, hash){ - var $=jQuery, options=options || {} - var $dest=(typeof dest=="string" && dest.length>0)? (dest==this.topkeyword? 0 : $('#'+dest)) : (dest)? $(dest) : [] //get element based on id, topkeyword, or dom ref - if ($dest===0 || $dest.length==1 && (!options.autorun || options.autorun && Math.abs($dest.offset().top+(options.yoffset||this.setting.yoffset)-$(window).scrollTop())>5)){ - this.$body.animate({scrollTop: ($dest===0)? 0 : $dest.offset().top+(options.yoffset||this.setting.yoffset)}, (options.duration||this.setting.duration), function(){ -//** if ($dest!==0 && hash) -//** location.hash=hash - }) - } - }, - - urlparamselect:function(){ - var param=window.location.search.match(/scrollto=[\w\-_,]+/i) //search for scrollto=divid - return (param)? param[0].split('=')[1] : null - }, - - init:function(){ - jQuery(document).ready(function($){ - var mainobj=bookmarkscroll - mainobj.$body=(window.opera)? (document.compatMode=="CSS1Compat"? $('html') : $('body')) : $('html,body') - var urlselectid=mainobj.urlparamselect() //get div of page.htm?scrollto=divid - if (urlselectid) //if id defined - setTimeout(function(){mainobj.scrollTo(document.getElementById(urlselectid) || $('a[name='+urlselectid+']:eq(0)').get(0), {autorun:true})}, 100) - $('a[href^="#"]').each(function(){ //loop through links with "#" prefix - var hashvalue=this.getAttribute('href').match(/#\w+$/i) //filter links at least 1 character following "#" prefix - hashvalue=(hashvalue)? hashvalue[0].substring(1) : null //strip "#" from hashvalue - if (this.hash.length>1){ //if hash value is more than just "#" - var $bookmark=$('a[name='+this.hash.substr(1)+']:eq(0)') - if ($bookmark.length==1 || this.hash==mainobj.topkeyword){ //if HTML anchor with given ID exists or href==topkeyword - if ($bookmark.length==1 && !document.all) //non IE, or IE7+ - $bookmark.html('.').css({position:'absolute', fontSize:1, visibility:'hidden'}) - $(this).click(function(e){ - mainobj.scrollTo((this.hash==mainobj.topkeyword)? 
mainobj.topkeyword : $bookmark.get(0), {}, this.hash) - e.preventDefault() - }) - } - } - }) - }) - } -} - -bookmarkscroll.init() \ No newline at end of file diff --git a/static/js/lib/jquery.cookiejar.js b/static/js/lib/jquery.cookiejar.js deleted file mode 100644 index 92f6c19b0f..0000000000 --- a/static/js/lib/jquery.cookiejar.js +++ /dev/null @@ -1,200 +0,0 @@ -/** - * .cookieJar - Cookie Jar Plugin - * - * Version: 1.0.1 - * Updated: 2007-08-14 - * - * Used to store objects, arrays or multiple values in one cookie, under one name - * - * Copyright (c) 2007 James Dempster (letssurf@gmail.com, http://www.jdempster.com/category/jquery/cookieJar/) - * - * Dual licensed under the MIT (MIT-LICENSE.txt) - * and GPL (GPL-LICENSE.txt) licenses. - **/ - -/** - * Requirements: - * - jQuery (John Resig, http://www.jquery.com/) - * - cookie (Klaus Hartl, http://www.stilbuero.de/2006/09/17/cookie-plugin-for-jquery/) - * - toJSON (Mark Gibson, http://jollytoad.googlepages.com/json.js) - **/ -(function($) { - $.cookieJar = function(name, options) { - if (!$.parseJSON) return false; - if (!$.toJSON) return false; - if (!$.cookie) return false; - return new function() { - /** - * @access private - **/ - function log(s) { - if (typeof console != 'undefined' && typeof console.log != 'undefined') { - console.log('cookiejar:' + self.cookieName + ' ' + s); - } else { - alert(s); - } - }; - - /** - * @access private - **/ - function save() { - if (self.options.debug) log('save ' + $.toJSON(self.cookieObject)); - return $.cookie(self.cookieName, $.toJSON(self.cookieObject), self.options.cookie); - }; - - /** - * @access private - **/ - function load() { - var cookieJSON = $.cookie(self.cookieName); - if (typeof cookieJSON == 'string') { - if (self.options.debug) log('load ' + cookieJSON); - self.cookieObject = $.parseJSON(cookieJSON, true); - } else { - if (self.options.debug) log('load new'); - self.cookieObject = {}; - save(); - } - } - - /** - * cookieJar.set(name, value) - * - * Sets a value in the cookie jar using a name to identify it - * - * @access public - * @param string name value identifier - * @param mixed value any value, array or object - * @return bool - **/ - this.set = function(name, value) { - if (self.options.debug) log('set ' + name + ' = ' + value); - self.cookieObject[name] = value; - return save(); - }; - - /** - * cookieJar.get(name) - * - * Gets a value from the cookie jar using a name to identify it - * - * @access public - * @param string name value identifier - * @return mixed stored value - **/ - this.get = function(name) { - if (!self.options.cacheCookie) { - load(); - } - if (self.options.debug) log('get ' + name + ' = ' + self.cookieObject[name]); - return self.cookieObject[name]; - }; - - /** - * cookieJar.remove([name]) - * - * Removes a value from the cookie jar using a name to identify it - * No name will clear the cookie jar of all values - * - * @access public - * @param string name value identifier - * @return bool - **/ - this.remove = function(name) { - if (self.options.debug) log('remove ' + name); - if (typeof name != 'undefined') { - delete(self.cookieObject[name]); - } else { - self.setFromObject({}); - } - return save(); - }; - - /** - * cookieJar.setFromObject(object) - * - * Uses the object as the set of values to store in the cookie jar - * - * @access public - * @param object object new values for the cookie jar - * @return bool - **/ - this.setFromObject = function(object) { - if (typeof object == 'object') { - if (self.options.debug) log('setFromObject'); - 
self.cookieObject = object; - return save(); - } - }; - - /** - * cookieJar.toObject() - * - * Returns the contents of the cookie jar as an object - * - * @access public - * @return object contents of the cookie jar - **/ - this.toObject = function() { - if (self.options.debug) log('toObject'); - return self.cookieObject; - }; - - /** - * cookieJar.toString() - * - * Returns the contents of the cookie jar as a JSON encoded string - * - * @access public - * @return string contents of the cookie jar as JSON - **/ - this.toString = function() { - if (self.options.debug) log('toString = ' + $.toJSON(self.cookieObject)); - return $.toJSON(self.cookieObject); - }; - - /** - * cookieJar.destroy() - * - * Removes the cookie containing the cookie jar from the server - * - * @access public - * @return bool - **/ - this.destroy = function() { - if (self.options.debug) log('destroy'); - self.cookieObject = {}; - return $.cookie(self.cookieName, null, self.options.cookie); - }; - - /** - * cookieJar(name, [options]) - * - * loads a cookie jar for the name provided, creates new if none found - * - * @param string name - * @param object options - * @return object cookieJar - **/ - this.construct = function(name, options) { - self.options = $.extend({ - cookie: { - expires: 365, - path: '/' - }, - cacheCookie: true, - cookiePrefix: 'jqCookieJar_', - debug: false - }, options); - - self.cookieName = self.options.cookiePrefix + name; - load(); - return self; - }; - - var self = this; - self.construct(name, options); - }; - }; -})(jQuery); diff --git a/static/js/lib/jquery.form.min.js b/static/js/lib/jquery.form.min.js deleted file mode 100644 index 7321a3b06c..0000000000 --- a/static/js/lib/jquery.form.min.js +++ /dev/null @@ -1,11 +0,0 @@ -/*! - * jQuery Form Plugin - * version: 3.51.0-2014.06.20 - * Requires jQuery v1.5 or later - * Copyright (c) 2014 M. Alsup - * Examples and documentation at: http://malsup.com/jquery/form/ - * Project repository: https://github.com/malsup/form - * Dual licensed under the MIT and GPL licenses. 
- * https://github.com/malsup/form#copyright-and-license - */ -!function(e){"use strict";"function"==typeof define&&define.amd?define(["jquery"],e):e("undefined"!=typeof jQuery?jQuery:window.Zepto)}(function(e){"use strict";function t(t){var r=t.data;t.isDefaultPrevented()||(t.preventDefault(),e(t.target).ajaxSubmit(r))}function r(t){var r=t.target,a=e(r);if(!a.is("[type=submit],[type=image]")){var n=a.closest("[type=submit]");if(0===n.length)return;r=n[0]}var i=this;if(i.clk=r,"image"==r.type)if(void 0!==t.offsetX)i.clk_x=t.offsetX,i.clk_y=t.offsetY;else if("function"==typeof e.fn.offset){var o=a.offset();i.clk_x=t.pageX-o.left,i.clk_y=t.pageY-o.top}else i.clk_x=t.pageX-r.offsetLeft,i.clk_y=t.pageY-r.offsetTop;setTimeout(function(){i.clk=i.clk_x=i.clk_y=null},100)}function a(){if(e.fn.ajaxSubmit.debug){var t="[jquery.form] "+Array.prototype.join.call(arguments,"");window.console&&window.console.log?window.console.log(t):window.opera&&window.opera.postError&&window.opera.postError(t)}}var n={};n.fileapi=void 0!==e("").get(0).files,n.formdata=void 0!==window.FormData;var i=!!e.fn.prop;e.fn.attr2=function(){if(!i)return this.attr.apply(this,arguments);var e=this.prop.apply(this,arguments);return e&&e.jquery||"string"==typeof e?e:this.attr.apply(this,arguments)},e.fn.ajaxSubmit=function(t){function r(r){var a,n,i=e.param(r,t.traditional).split("&"),o=i.length,s=[];for(a=0;o>a;a++)i[a]=i[a].replace(/\+/g," "),n=i[a].split("="),s.push([decodeURIComponent(n[0]),decodeURIComponent(n[1])]);return s}function o(a){for(var n=new FormData,i=0;i').val(m.extraData[d].value).appendTo(w)[0]:e('').val(m.extraData[d]).appendTo(w)[0]);m.iframeTarget||v.appendTo("body"),g.attachEvent?g.attachEvent("onload",s):g.addEventListener("load",s,!1),setTimeout(t,15);try{w.submit()}catch(h){var x=document.createElement("form").submit;x.apply(w)}}finally{w.setAttribute("action",i),w.setAttribute("enctype",c),r?w.setAttribute("target",r):f.removeAttr("target"),e(l).remove()}}function s(t){if(!x.aborted&&!F){if(M=n(g),M||(a("cannot access response document"),t=k),t===D&&x)return x.abort("timeout"),void S.reject(x,"timeout");if(t==k&&x)return x.abort("server abort"),void S.reject(x,"error","server abort");if(M&&M.location.href!=m.iframeSrc||T){g.detachEvent?g.detachEvent("onload",s):g.removeEventListener("load",s,!1);var r,i="success";try{if(T)throw"timeout";var o="xml"==m.dataType||M.XMLDocument||e.isXMLDoc(M);if(a("isXml="+o),!o&&window.opera&&(null===M.body||!M.body.innerHTML)&&--O)return a("requeing onLoad callback, DOM not available"),void setTimeout(s,250);var u=M.body?M.body:M.documentElement;x.responseText=u?u.innerHTML:null,x.responseXML=M.XMLDocument?M.XMLDocument:M,o&&(m.dataType="xml"),x.getResponseHeader=function(e){var t={"content-type":m.dataType};return t[e.toLowerCase()]},u&&(x.status=Number(u.getAttribute("status"))||x.status,x.statusText=u.getAttribute("statusText")||x.statusText);var c=(m.dataType||"").toLowerCase(),l=/(json|script|text)/.test(c);if(l||m.textarea){var f=M.getElementsByTagName("textarea")[0];if(f)x.responseText=f.value,x.status=Number(f.getAttribute("status"))||x.status,x.statusText=f.getAttribute("statusText")||x.statusText;else if(l){var p=M.getElementsByTagName("pre")[0],h=M.getElementsByTagName("body")[0];p?x.responseText=p.textContent?p.textContent:p.innerText:h&&(x.responseText=h.textContent?h.textContent:h.innerText)}}else"xml"==c&&!x.responseXML&&x.responseText&&(x.responseXML=X(x.responseText));try{E=_(x,c,m)}catch(y){i="parsererror",x.error=r=y||i}}catch(y){a("error caught: 
",y),i="error",x.error=r=y||i}x.aborted&&(a("upload aborted"),i=null),x.status&&(i=x.status>=200&&x.status<300||304===x.status?"success":"error"),"success"===i?(m.success&&m.success.call(m.context,E,"success",x),S.resolve(x.responseText,"success",x),d&&e.event.trigger("ajaxSuccess",[x,m])):i&&(void 0===r&&(r=x.statusText),m.error&&m.error.call(m.context,x,i,r),S.reject(x,"error",r),d&&e.event.trigger("ajaxError",[x,m,r])),d&&e.event.trigger("ajaxComplete",[x,m]),d&&!--e.active&&e.event.trigger("ajaxStop"),m.complete&&m.complete.call(m.context,x,i),F=!0,m.timeout&&clearTimeout(j),setTimeout(function(){m.iframeTarget?v.attr("src",m.iframeSrc):v.remove(),x.responseXML=null},100)}}}var c,l,m,d,p,v,g,x,y,b,T,j,w=f[0],S=e.Deferred();if(S.abort=function(e){x.abort(e)},r)for(l=0;l'),v.css({position:"absolute",top:"-1000px",left:"-1000px"})),g=v[0],x={aborted:0,responseText:null,responseXML:null,status:0,statusText:"n/a",getAllResponseHeaders:function(){},getResponseHeader:function(){},setRequestHeader:function(){},abort:function(t){var r="timeout"===t?"timeout":"aborted";a("aborting upload... "+r),this.aborted=1;try{g.contentWindow.document.execCommand&&g.contentWindow.document.execCommand("Stop")}catch(n){}v.attr("src",m.iframeSrc),x.error=r,m.error&&m.error.call(m.context,x,r,t),d&&e.event.trigger("ajaxError",[x,m,r]),m.complete&&m.complete.call(m.context,x,r)}},d=m.global,d&&0===e.active++&&e.event.trigger("ajaxStart"),d&&e.event.trigger("ajaxSend",[x,m]),m.beforeSend&&m.beforeSend.call(m.context,x,m)===!1)return m.global&&e.active--,S.reject(),S;if(x.aborted)return S.reject(),S;y=w.clk,y&&(b=y.name,b&&!y.disabled&&(m.extraData=m.extraData||{},m.extraData[b]=y.value,"image"==y.type&&(m.extraData[b+".x"]=w.clk_x,m.extraData[b+".y"]=w.clk_y)));var D=1,k=2,A=e("meta[name=csrf-token]").attr("content"),L=e("meta[name=csrf-param]").attr("content");L&&A&&(m.extraData=m.extraData||{},m.extraData[L]=A),m.forceSync?o():setTimeout(o,10);var E,M,F,O=50,X=e.parseXML||function(e,t){return window.ActiveXObject?(t=new ActiveXObject("Microsoft.XMLDOM"),t.async="false",t.loadXML(e)):t=(new DOMParser).parseFromString(e,"text/xml"),t&&t.documentElement&&"parsererror"!=t.documentElement.nodeName?t:null},C=e.parseJSON||function(e){return window.eval("("+e+")")},_=function(t,r,a){var n=t.getResponseHeader("content-type")||"",i="xml"===r||!r&&n.indexOf("xml")>=0,o=i?t.responseXML:t.responseText;return i&&"parsererror"===o.documentElement.nodeName&&e.error&&e.error("parsererror"),a&&a.dataFilter&&(o=a.dataFilter(o,r)),"string"==typeof o&&("json"===r||!r&&n.indexOf("json")>=0?o=C(o):("script"===r||!r&&n.indexOf("javascript")>=0)&&e.globalEval(o)),o};return S}if(!this.length)return a("ajaxSubmit: skipping submit process - no element selected"),this;var u,c,l,f=this;"function"==typeof t?t={success:t}:void 0===t&&(t={}),u=t.type||this.attr2("method"),c=t.url||this.attr2("action"),l="string"==typeof c?e.trim(c):"",l=l||window.location.href||"",l&&(l=(l.match(/^([^#]+)/)||[])[1]),t=e.extend(!0,{url:l,success:e.ajaxSettings.success,type:u||e.ajaxSettings.type,iframeSrc:/^https/i.test(window.location.href||"")?"javascript:false":"about:blank"},t);var m={};if(this.trigger("form-pre-serialize",[this,t,m]),m.veto)return a("ajaxSubmit: submit vetoed via form-pre-serialize trigger"),this;if(t.beforeSerialize&&t.beforeSerialize(this,t)===!1)return a("ajaxSubmit: submit aborted via beforeSerialize callback"),this;var d=t.traditional;void 0===d&&(d=e.ajaxSettings.traditional);var 
p,h=[],v=this.formToArray(t.semantic,h);if(t.data&&(t.extraData=t.data,p=e.param(t.data,d)),t.beforeSubmit&&t.beforeSubmit(v,this,t)===!1)return a("ajaxSubmit: submit aborted via beforeSubmit callback"),this;if(this.trigger("form-submit-validate",[v,this,t,m]),m.veto)return a("ajaxSubmit: submit vetoed via form-submit-validate trigger"),this;var g=e.param(v,d);p&&(g=g?g+"&"+p:p),"GET"==t.type.toUpperCase()?(t.url+=(t.url.indexOf("?")>=0?"&":"?")+g,t.data=null):t.data=g;var x=[];if(t.resetForm&&x.push(function(){f.resetForm()}),t.clearForm&&x.push(function(){f.clearForm(t.includeHidden)}),!t.dataType&&t.target){var y=t.success||function(){};x.push(function(r){var a=t.replaceTarget?"replaceWith":"html";e(t.target)[a](r).each(y,arguments)})}else t.success&&x.push(t.success);if(t.success=function(e,r,a){for(var n=t.context||this,i=0,o=x.length;o>i;i++)x[i].apply(n,[e,r,a||f,f])},t.error){var b=t.error;t.error=function(e,r,a){var n=t.context||this;b.apply(n,[e,r,a,f])}}if(t.complete){var T=t.complete;t.complete=function(e,r){var a=t.context||this;T.apply(a,[e,r,f])}}var j=e("input[type=file]:enabled",this).filter(function(){return""!==e(this).val()}),w=j.length>0,S="multipart/form-data",D=f.attr("enctype")==S||f.attr("encoding")==S,k=n.fileapi&&n.formdata;a("fileAPI :"+k);var A,L=(w||D)&&!k;t.iframe!==!1&&(t.iframe||L)?t.closeKeepAlive?e.get(t.closeKeepAlive,function(){A=s(v)}):A=s(v):A=(w||D)&&k?o(v):e.ajax(t),f.removeData("jqxhr").data("jqxhr",A);for(var E=0;Ec;c++)if(d=u[c],f=d.name,f&&!d.disabled)if(t&&o.clk&&"image"==d.type)o.clk==d&&(a.push({name:f,value:e(d).val(),type:d.type}),a.push({name:f+".x",value:o.clk_x},{name:f+".y",value:o.clk_y}));else if(m=e.fieldValue(d,!0),m&&m.constructor==Array)for(r&&r.push(d),l=0,h=m.length;h>l;l++)a.push({name:f,value:m[l]});else if(n.fileapi&&"file"==d.type){r&&r.push(d);var v=d.files;if(v.length)for(l=0;li;i++)r.push({name:a,value:n[i]});else null!==n&&"undefined"!=typeof n&&r.push({name:this.name,value:n})}}),e.param(r)},e.fn.fieldValue=function(t){for(var r=[],a=0,n=this.length;n>a;a++){var i=this[a],o=e.fieldValue(i,t);null===o||"undefined"==typeof o||o.constructor==Array&&!o.length||(o.constructor==Array?e.merge(r,o):r.push(o))}return r},e.fieldValue=function(t,r){var a=t.name,n=t.type,i=t.tagName.toLowerCase();if(void 0===r&&(r=!0),r&&(!a||t.disabled||"reset"==n||"button"==n||("checkbox"==n||"radio"==n)&&!t.checked||("submit"==n||"image"==n)&&t.form&&t.form.clk!=t||"select"==i&&-1==t.selectedIndex))return null;if("select"==i){var o=t.selectedIndex;if(0>o)return null;for(var s=[],u=t.options,c="select-one"==n,l=c?o+1:u.length,f=c?o:0;l>f;f++){var m=u[f];if(m.selected){var d=m.value;if(d||(d=m.attributes&&m.attributes.value&&!m.attributes.value.specified?m.text:m.value),c)return d;s.push(d)}}return s}return e(t).val()},e.fn.clearForm=function(t){return this.each(function(){e("input,select,textarea",this).clearFields(t)})},e.fn.clearFields=e.fn.clearInputs=function(t){var r=/^(?:color|date|datetime|email|month|number|password|range|search|tel|text|time|url|week)$/i;return this.each(function(){var a=this.type,n=this.tagName.toLowerCase();r.test(a)||"textarea"==n?this.value="":"checkbox"==a||"radio"==a?this.checked=!1:"select"==n?this.selectedIndex=-1:"file"==a?/MSIE/.test(navigator.userAgent)?e(this).replaceWith(e(this).clone(!0)):e(this).val(""):t&&(t===!0&&/hidden/.test(a)||"string"==typeof t&&e(this).is(t))&&(this.value="")})},e.fn.resetForm=function(){return this.each(function(){("function"==typeof this.reset||"object"==typeof 
this.reset&&!this.reset.nodeType)&&this.reset()})},e.fn.enable=function(e){return void 0===e&&(e=!0),this.each(function(){this.disabled=!e})},e.fn.selected=function(t){return void 0===t&&(t=!0),this.each(function(){var r=this.type;if("checkbox"==r||"radio"==r)this.checked=t;else if("option"==this.tagName.toLowerCase()){var a=e(this).parent("select");t&&a[0]&&"select-one"==a[0].type&&a.find("option").selected(!1),this.selected=t}})},e.fn.ajaxSubmit.debug=!1}); \ No newline at end of file diff --git a/static/js/lib/jquery.json-2.2.min.js b/static/js/lib/jquery.json-2.2.min.js deleted file mode 100644 index bad4a0afa0..0000000000 --- a/static/js/lib/jquery.json-2.2.min.js +++ /dev/null @@ -1,31 +0,0 @@ - -(function($){$.toJSON=function(o) -{if(typeof(JSON)=='object'&&JSON.stringify) -return JSON.stringify(o);var type=typeof(o);if(o===null) -return"null";if(type=="undefined") -return undefined;if(type=="number"||type=="boolean") -return o+"";if(type=="string") -return $.quoteString(o);if(type=='object') -{if(typeof o.toJSON=="function") -return $.toJSON(o.toJSON());if(o.constructor===Date) -{var month=o.getUTCMonth()+1;if(month<10)month='0'+month;var day=o.getUTCDate();if(day<10)day='0'+day;var year=o.getUTCFullYear();var hours=o.getUTCHours();if(hours<10)hours='0'+hours;var minutes=o.getUTCMinutes();if(minutes<10)minutes='0'+minutes;var seconds=o.getUTCSeconds();if(seconds<10)seconds='0'+seconds;var milli=o.getUTCMilliseconds();if(milli<100)milli='0'+milli;if(milli<10)milli='0'+milli;return'"'+year+'-'+month+'-'+day+'T'+ -hours+':'+minutes+':'+seconds+'.'+milli+'Z"';} -if(o.constructor===Array) -{var ret=[];for(var i=0;i=this.setting.startline)? true : false; - if (this.state.shouldvisible && !this.state.isvisible){ - this.$control.stop().animate({opacity:1}, this.setting.fadeduration[0]); - this.state.isvisible = true; - } else if (this.state.shouldvisible === false && this.state.isvisible){ - this.$control.stop().animate({opacity:0}, this.setting.fadeduration[1]); - this.state.isvisible=false; - } - }, - - init:function(){ - jQuery(document).ready(function($){ - var mainobj = scrolltotop; - var iebrws = document.all; - mainobj.cssfixedsupport=!iebrws || iebrws && document.compatMode.toLowerCase() === "CSS1Compat".toLowerCase() && window.XMLHttpRequest; //not IE or IE7+ browsers in standards mode - mainobj.$body=(window.opera)? (document.compatMode.toLowerCase() === "CSS1Compat".toLowerCase() ? $('html') : $('body')) : $('html,body'); - mainobj.$control=$('
        '+mainobj.controlHTML+'
        ') - .css({position:mainobj.cssfixedsupport? 'fixed' : 'absolute', bottom:mainobj.controlattrs.offsety, right:mainobj.controlattrs.offsetx, opacity:0, cursor:'pointer'}) - .attr({title:'Scroll Back to Top'}) - .click(function(){mainobj.scrollup(); return false;}) - .appendTo('body'); - if (document.all && !window.XMLHttpRequest && mainobj.$control.text()!==''){ //loose check for IE6 and below, plus whether control contains any text - mainobj.$control.css({width:mainobj.$control.width()}); //IE6- seems to require an explicit width on a DIV containing text - } - mainobj.togglecontrol(); - $('a[href="' + mainobj.anchorkeyword +'"]').click(function(){ - mainobj.scrollup(); - return false; - }); - $(window).bind('scroll resize', function(){ - mainobj.togglecontrol(); - }); - }); - } -}; - -scrolltotop.init(); diff --git a/static/js/lib/jquery.selectboxes.min.js b/static/js/lib/jquery.selectboxes.min.js deleted file mode 100644 index 2ea85acc01..0000000000 --- a/static/js/lib/jquery.selectboxes.min.js +++ /dev/null @@ -1,14 +0,0 @@ -/* - * - * Copyright (c) 2006-2008 Sam Collett (http://www.texotela.co.uk) - * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) - * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses. - * - * Version 2.2.4 - * Demo: http://www.texotela.co.uk/code/jquery/select/ - * - * $LastChangedDate: 2008-06-17 17:27:25 +0100 (Tue, 17 Jun 2008) $ - * $Rev: 5727 $ - * - */ -;(function(h){h.fn.addOption=function(){var j=function(a,f,c,g){var d=document.createElement("option");d.value=f,d.text=c;var b=a.options;var e=b.length;if(!a.cache){a.cache={};for(var i=0;i=2){if(typeof(k[1])=="boolean")l=k[1];else if(typeof(k[2])=="boolean")l=k[2];if(!m){o=k[0];p=k[1]}}this.each(function(){if(this.nodeName.toLowerCase()!="select")return;if(m){for(var a in n){j(this,a,n[a],l)}}else{j(this,o,p,l)}});return this};h.fn.ajaxAddOption=function(c,g,d,b,e){if(typeof(c)!="string")return this;if(typeof(g)!="object")g={};if(typeof(d)!="boolean")d=true;this.each(function(){var f=this;h.getJSON(c,g,function(a){h(f).addOption(a,d);if(typeof b=="function"){if(typeof e=="object"){b.apply(f,e)}else{b.call(f)}}})});return this};h.fn.removeOption=function(){var d=arguments;if(d.length==0)return this;var b=typeof(d[0]);var e,i;if(b=="string"||b=="object"||b=="function"){e=d[0];if(e.constructor==Array){var j=e.length;for(var k=0;k=0;g--){if(e.constructor==RegExp){if(f[g].value.match(e)){a=true}}else if(f[g].value==e){a=true}if(a&&d[1]===true)a=f[g].selected;if(a){f[g]=null}a=false}}else{if(d[1]===true){a=f[i].selected}else{a=true}if(a){this.remove(i)}}});return this};h.fn.sortOptions=function(e){var i=h(this).selectedValues();var j=typeof(e)=="undefined"?true:!!e;this.each(function(){if(this.nodeName.toLowerCase()!="select")return;var c=this.options;var g=c.length;var d=[];for(var b=0;bo2t?-1:1}});for(var b=0;b= _getTopOffset(element) + threshold + element.offsetHeight; + } + + function _isAtLeftOfViewport() { + var fold; + if (container === window) { + fold = documentLeft; + } else { + fold = _getLeftOffset(container); + } + return fold >= _getLeftOffset(element) + threshold + element.offsetWidth; + } + + ownerDocument = element.ownerDocument; + documentTop = window.pageYOffset || ownerDocument.body.scrollTop; + documentLeft = window.pageXOffset || ownerDocument.body.scrollLeft; + + return !_isBelowViewport() && !_isAboveViewport() && !_isAtRightOfViewport() && !_isAtLeftOfViewport(); + } + + function _now() { + var d = new Date(); + return d.getTime(); + 
} + + function _merge_objects(obj1, obj2) { + var obj3 = {}, + propertyName; + for (propertyName in obj1) { + if (obj1.hasOwnProperty(propertyName)) { + obj3[propertyName] = obj1[propertyName]; + } + } + for (propertyName in obj2) { + if (obj2.hasOwnProperty(propertyName)) { + obj3[propertyName] = obj2[propertyName]; + } + } + return obj3; + } + + function _convertToArray(nodeSet) { + return Array.prototype.slice.call(nodeSet); + } + + function _setSourcesForPicture(element, srcsetDataAttribute) { + var parent = element.parentElement; + if (parent.tagName !== 'PICTURE') { + return; + } + for (var i = 0; i < parent.children.length; i++) { + var pictureChild = parent.children[i]; + if (pictureChild.tagName === 'SOURCE') { + var sourceSrcset = pictureChild.getAttribute('data-' + srcsetDataAttribute); + if (sourceSrcset) { + pictureChild.setAttribute('srcset', sourceSrcset); + } + } + } + } + + function _setSources(element, srcsetDataAttribute, srcDataAttribute) { + var tagName = element.tagName; + var elementSrc = element.getAttribute('data-' + srcDataAttribute); + if (tagName === "IMG") { + _setSourcesForPicture(element, srcsetDataAttribute); + var imgSrcset = element.getAttribute('data-' + srcsetDataAttribute); + if (imgSrcset) element.setAttribute("srcset", imgSrcset); + if (elementSrc) element.setAttribute("src", elementSrc); + return; + } + if (tagName === "IFRAME") { + if (elementSrc) element.setAttribute("src", elementSrc); + return; + } + if (elementSrc) element.style.backgroundImage = "url(" + elementSrc + ")"; + } + + function _bind(fn, obj) { + return function() { + return fn.apply(obj, arguments); + }; + } + + + /* + * INITIALIZER + * ----------- + */ + + function LazyLoad(instanceSettings) { + _init(); + + this._settings = _merge_objects(_defaultSettings, instanceSettings); + this._queryOriginNode = this._settings.container === window ? 
document : this._settings.container; + + this._previousLoopTime = 0; + this._loopTimeout = null; + + this._handleScrollFn = _bind(this.handleScroll, this); + + window.addEventListener("resize", this._handleScrollFn); + this.update(); + } + + + /* + * PRIVATE FUNCTIONS *RELATED* TO A SPECIFIC INSTANCE OF LAZY LOAD + * --------------------------------------------------------------- + */ + + LazyLoad.prototype._showOnAppear = function(element) { + var settings = this._settings; + + function errorCallback() { + element.removeEventListener("load", loadCallback); + element.classList.remove(settings.class_loading); + if (settings.callback_error) { + settings.callback_error(element); + } + } + function loadCallback() { + /* As this method is asynchronous, it must be protected against external destroy() calls */ + if (settings === null) { + return; + } + /* Calling LOAD callback */ + if (settings.callback_load) { + settings.callback_load(element); + } + element.classList.remove(settings.class_loading); + element.classList.add(settings.class_loaded); + element.removeEventListener("load", loadCallback); + element.removeEventListener("error", errorCallback); + } + + if (element.tagName === "IMG" || element.tagName === "IFRAME") { + element.addEventListener("load", loadCallback); + element.addEventListener("error", errorCallback); + element.classList.add(settings.class_loading); + } + + _setSources(element, settings.data_srcset, settings.data_src); + /* Calling SET callback */ + if (settings.callback_set) { + settings.callback_set(element); + } + }; + + LazyLoad.prototype._loopThroughElements = function() { + var i, element, + settings = this._settings, + elements = this._elements, + elementsLength = (!elements) ? 0 : elements.length, + processedIndexes = []; + + for (i = 0; i < elementsLength; i++) { + element = elements[i]; + /* If must skip_invisible and element is invisible, skip it */ + if (settings.skip_invisible && (element.offsetParent === null)) { + continue; + } + if (_isInsideViewport(element, settings.container, settings.threshold)) { + this._showOnAppear(element); + + /* Marking the element as processed. */ + processedIndexes.push(i); + element.wasProcessed = true; + } + } + /* Removing processed elements from this._elements. */ + while (processedIndexes.length > 0) { + elements.splice(processedIndexes.pop(), 1); + /* Calling the end loop callback */ + if (settings.callback_processed) { + settings.callback_processed(elements.length); + } + } + /* Stop listening to scroll event when 0 elements remains */ + if (elementsLength === 0) { + this._stopScrollHandler(); + } + }; + + LazyLoad.prototype._purgeElements = function() { + var i, element, + elements = this._elements, + elementsLength = elements.length, + elementsToPurge = []; + + for (i = 0; i < elementsLength; i++) { + element = elements[i]; + /* If the element has already been processed, skip it */ + if (element.wasProcessed) { + elementsToPurge.push(i); + } + } + /* Removing elements to purge from this._elements. 
*/ + while (elementsToPurge.length > 0) { + elements.splice(elementsToPurge.pop(), 1); + } + }; + + LazyLoad.prototype._startScrollHandler = function() { + if (!this._isHandlingScroll) { + this._isHandlingScroll = true; + this._settings.container.addEventListener("scroll", this._handleScrollFn); + } + }; + + LazyLoad.prototype._stopScrollHandler = function() { + if (this._isHandlingScroll) { + this._isHandlingScroll = false; + this._settings.container.removeEventListener("scroll", this._handleScrollFn); + } + }; + + + /* + * PUBLIC FUNCTIONS + * ---------------- + */ + + LazyLoad.prototype.handleScroll = function() { + var remainingTime, + now, + throttle; + + // IE8 fix for destroy() malfunctioning + if (!this._settings) { + return; + } + + now = _now(); + throttle = this._settings.throttle; + + if (throttle !== 0) { + remainingTime = throttle - (now - this._previousLoopTime); + if (remainingTime <= 0 || remainingTime > throttle) { + if (this._loopTimeout) { + clearTimeout(this._loopTimeout); + this._loopTimeout = null; + } + this._previousLoopTime = now; + this._loopThroughElements(); + } else if (!this._loopTimeout) { + this._loopTimeout = setTimeout(_bind(function() { + this._previousLoopTime = _now(); + this._loopTimeout = null; + this._loopThroughElements(); + }, this), remainingTime); + } + } else { + this._loopThroughElements(); + } + }; + + LazyLoad.prototype.update = function() { + this._elements = _convertToArray(this._queryOriginNode.querySelectorAll(this._settings.elements_selector)); + this._purgeElements(); + this._loopThroughElements(); + this._startScrollHandler(); + }; + + LazyLoad.prototype.destroy = function() { + window.removeEventListener("resize", this._handleScrollFn); + if (this._loopTimeout) { + clearTimeout(this._loopTimeout); + this._loopTimeout = null; + } + this._stopScrollHandler(); + this._elements = null; + this._queryOriginNode = null; + this._settings = null; + }; + + + return LazyLoad; + + +})); diff --git a/static/js/lib/lodash.min.js b/static/js/lib/lodash.min.js deleted file mode 100644 index 396c1106ca..0000000000 --- a/static/js/lib/lodash.min.js +++ /dev/null @@ -1,127 +0,0 @@ -/** - * @license - * lodash lodash.com/license | Underscore.js 1.8.3 underscorejs.org/LICENSE - */ -;(function(){function t(t,n){return t.set(n[0],n[1]),t}function n(t,n){return t.add(n),t}function r(t,n,r){switch(r.length){case 0:return t.call(n);case 1:return t.call(n,r[0]);case 2:return t.call(n,r[0],r[1]);case 3:return t.call(n,r[0],r[1],r[2])}return t.apply(n,r)}function e(t,n,r,e){for(var u=-1,o=t?t.length:0;++u=t?t:r),n!==T&&(t=t>=n?t:n)),t}function rn(t,n,r,e,o,i,f){var c;if(e&&(c=i?e(t,o,i,f):e(t)),c!==T)return c;if(!Ze(t))return t;if(o=yi(t)){if(c=Kr(t),!n)return lr(t,c)}else{var a=qr(t),l="[object Function]"==a||"[object GeneratorFunction]"==a;if(bi(t))return or(t,n);if("[object Object]"==a||"[object Arguments]"==a||l&&!i){if(C(t))return i?t:{};if(c=Gr(l?{}:t), -!n)return hr(t,Xt(c,t))}else{if(!Ct[a])return i?t:{};c=Jr(t,a,rn,n)}}if(f||(f=new qt),i=f.get(t))return i;if(f.set(t,c),!o)var s=r?gn(t,iu,Tr):iu(t);return u(s||t,function(u,o){s&&(o=u,u=t[o]),Yt(c,o,rn(u,n,r,e,o,t,f))}),c}function en(t){var n=iu(t),r=n.length;return function(e){if(null==e)return!r;for(var u=r;u--;){var o=n[u],i=t[o],f=e[o];if(f===T&&!(o in Object(e))||!i(f))return false}return true}}function un(t){return Ze(t)?Tu(t):{}}function on(t,n,r){if(typeof t!="function")throw new Au("Expected a function"); -return At(function(){t.apply(T,r)},n)}function fn(t,n,r,e){var 
u=-1,o=c,i=true,f=t.length,s=[],h=n.length;if(!f)return s;r&&(n=l(n,O(r))),e?(o=a,i=false):n.length>=200&&(o=E,i=false,n=new Zt(n));t:for(;++u0&&r(f)?n>1?sn(f,n-1,r,e,u):s(u,f):e||(u[u.length]=f)}return u}function hn(t,n){return t&&ko(t,n,iu)}function pn(t,n){return t&&Eo(t,n,iu)}function _n(t,n){return f(n,function(n){return Fe(t[n])})}function vn(t,n){n=ne(n,t)?[n]:er(n);for(var r=0,e=n.length;null!=t&&e>r;)t=t[fe(n[r++])];return r&&r==e?t:T}function gn(t,n,r){ -return n=n(t),yi(t)?n:s(n,r(t))}function dn(t,n){return t>n}function yn(t,n){return null!=t&&(Wu.call(t,n)||typeof t=="object"&&n in t&&null===Ju(Object(t)))}function bn(t,n){return null!=t&&n in Object(t)}function xn(t,n,r){for(var e=r?a:c,u=t[0].length,o=t.length,i=o,f=Array(o),s=1/0,h=[];i--;){var p=t[i];i&&n&&(p=l(p,O(n))),s=to(p.length,s),f[i]=!r&&(n||u>=120&&p.length>=120)?new Zt(i&&p):T}var p=t[0],_=-1,v=f[0];t:for(;++_h.length;){var g=p[_],d=n?n(g):g,g=r||0!==g?g:0;if(v?!E(v,d):!e(h,d,r)){ -for(i=o;--i;){var y=f[i];if(y?!E(y,d):!e(t[i],d,r))continue t}v&&v.push(d),h.push(g)}}return h}function jn(t,n,r){var e={};return hn(t,function(t,u,o){n(e,r(t),u,o)}),e}function wn(t,n,e){return ne(n,t)||(n=er(n),t=ie(t,n),n=ve(n)),n=null==t?t:t[fe(n)],null==n?T:r(n,t,e)}function mn(t,n,r,e,u){if(t===n)n=true;else if(null==t||null==n||!Ze(t)&&!Te(n))n=t!==t&&n!==n;else t:{var o=yi(t),i=yi(n),f="[object Array]",c="[object Array]";o||(f=qr(t),f="[object Arguments]"==f?"[object Object]":f),i||(c=qr(n), -c="[object Arguments]"==c?"[object Object]":c);var a="[object Object]"==f&&!C(t),i="[object Object]"==c&&!C(n);if((c=f==c)&&!a)u||(u=new qt),n=o||Ye(t)?zr(t,n,mn,r,e,u):Ur(t,n,f,mn,r,e,u);else{if(!(2&e)&&(o=a&&Wu.call(t,"__wrapped__"),f=i&&Wu.call(n,"__wrapped__"),o||f)){t=o?t.value():t,n=f?n.value():n,u||(u=new qt),n=mn(t,n,r,e,u);break t}if(c)n:if(u||(u=new qt),o=2&e,f=iu(t),i=f.length,c=iu(n).length,i==c||o){for(a=i;a--;){var l=f[a];if(!(o?l in n:yn(n,l))){n=false;break n}}if(c=u.get(t))n=c==n;else{ -c=true,u.set(t,n);for(var s=o;++at}function In(t,n){var r=-1,e=Ue(t)?Array(t.length):[]; -return Ao(t,function(t,u,o){e[++r]=n(t,u,o)}),e}function Rn(t){var n=Pr(t);return 1==n.length&&n[0][2]?ue(n[0][0],n[0][1]):function(r){return r===t||An(r,t,n)}}function Wn(t,n){return ne(t)&&n===n&&!Ze(n)?ue(fe(t),n):function(r){var e=uu(r,t);return e===T&&e===n?ou(r,t):mn(n,e,T,3)}}function Bn(t,n,r,e,o){if(t!==n){if(!yi(n)&&!Ye(n))var i=fu(n);u(i||n,function(u,f){if(i&&(f=u,u=n[f]),Ze(u)){o||(o=new qt);var c=f,a=o,l=t[c],s=n[c],h=a.get(s);if(h)Jt(t,c,h);else{var h=e?e(l,s,c+"",t,n,a):T,p=h===T;p&&(h=s, -yi(s)||Ye(s)?yi(l)?h=l:$e(l)?h=lr(l):(p=false,h=rn(s,true)):Ve(s)||ze(s)?ze(l)?h=ru(l):!Ze(l)||r&&Fe(l)?(p=false,h=rn(s,true)):h=l:p=false),a.set(s,h),p&&Bn(h,s,r,e,a),a["delete"](s),Jt(t,c,h)}}else c=e?e(t[f],u,f+"",t,n,o):T,c===T&&(c=u),Jt(t,f,c)})}}function Ln(t,n){var r=t.length;return r?(n+=0>n?r:0,Xr(n,r)?t[n]:T):void 0}function Mn(t,n,r){var e=-1;return n=l(n.length?n:[pu],O(Fr())),t=In(t,function(t){return{a:l(n,function(n){return n(t)}),b:++e,c:t}}),j(t,function(t,n){var e;t:{e=-1;for(var u=t.a,o=n.a,i=u.length,f=r.length;++e=f?c:c*("desc"==r[e]?-1:1);break t}}e=t.b-n.b}return e})}function Cn(t,n){return t=Object(t),h(n,function(n,r){return r in t&&(n[r]=t[r]),n},{})}function zn(t,n){for(var r=-1,e=gn(t,fu,Bo),u=e.length,o={};++rn||n>9007199254740991)return r;do n%2&&(r+=t),(n=Gu(n/2))&&(t+=t);while(n);return r}function Zn(t,n,r,e){n=ne(n,t)?[n]:er(n);for(var 
u=-1,o=n.length,i=o-1,f=t;null!=f&&++un&&(n=-n>u?0:u+n),r=r>u?u:r,0>r&&(r+=u),u=n>r?0:r-n>>>0,n>>>=0,r=Array(u);++e=u){for(;u>e;){var o=e+u>>>1,i=t[o];null!==i&&!Je(i)&&(r?n>=i:n>i)?e=o+1:u=o}return u} -return Kn(t,n,pu,r)}function Kn(t,n,r,e){n=r(n);for(var u=0,o=t?t.length:0,i=n!==n,f=null===n,c=Je(n),a=n===T;o>u;){var l=Gu((u+o)/2),s=r(t[l]),h=s!==T,p=null===s,_=s===s,v=Je(s);(i?e||_:a?_&&(e||h):f?_&&h&&(e||!p):c?_&&h&&!p&&(e||!v):p||v?0:e?n>=s:n>s)?u=l+1:o=l}return to(o,4294967294)}function Gn(t,n){for(var r=-1,e=t.length,u=0,o=[];++r=200){if(u=n?null:Io(t))return D(u);i=false,u=E,l=new Zt}else l=n?[]:f;t:for(;++ee?n[e]:T);return i}function rr(t){return $e(t)?t:[]}function er(t){return yi(t)?t:Co(t)}function ur(t,n,r){var e=t.length;return r=r===T?e:r,!n&&r>=e?t:Tn(t,n,r)}function or(t,n){ -if(n)return t.slice();var r=new t.constructor(t.length);return t.copy(r),r}function ir(t){var n=new t.constructor(t.byteLength);return new Fu(n).set(new Fu(t)),n}function fr(t,n){if(t!==n){var r=t!==T,e=null===t,u=t===t,o=Je(t),i=n!==T,f=null===n,c=n===n,a=Je(n);if(!f&&!a&&!o&&t>n||o&&i&&c&&!f&&!a||e&&i&&c||!r&&c||!u)return 1;if(!e&&!o&&!a&&n>t||a&&r&&u&&!e&&!o||f&&r&&u||!i&&u||!c)return-1}return 0}function cr(t,n,r,e){var u=-1,o=t.length,i=r.length,f=-1,c=n.length,a=Xu(o-i,0),l=Array(c+a);for(e=!e;++fu)&&(l[r[u]]=t[u]);for(;a--;)l[f++]=t[u++];return l}function ar(t,n,r,e){var u=-1,o=t.length,i=-1,f=r.length,c=-1,a=n.length,l=Xu(o-f,0),s=Array(l+a);for(e=!e;++uu)&&(s[l+r[i]]=t[u++]);return s}function lr(t,n){var r=-1,e=t.length;for(n||(n=Array(e));++r1?r[u-1]:T,i=u>2?r[2]:T,o=t.length>3&&typeof o=="function"?(u--,o):T;for(i&&te(r[0],r[1],i)&&(o=3>u?T:o,u=1),n=Object(n);++ei&&f[0]!==a&&f[i-1]!==a?[]:$(f,a), -i-=c.length,e>i?Br(t,n,Ar,u.placeholder,T,f,c,T,T,e-i):r(this&&this!==Kt&&this instanceof u?o:t,this,f)}var o=xr(t);return u}function wr(t){return function(n,r,e){var u=Object(n);if(r=Fr(r,3),!Ue(n))var o=iu(n);return e=t(o||n,function(t,n){return o&&(n=t,t=u[n]),r(t,n,u)},e),e>-1?n[o?o[e]:e]:T}}function mr(t){return Me(function(n){n=sn(n,1);var r=n.length,e=r,u=zt.prototype.thru;for(t&&n.reverse();e--;){var o=n[e];if(typeof o!="function")throw new Au("Expected a function");if(u&&!i&&"wrapper"==$r(o))var i=new zt([],true); -}for(e=i?e:r;++e=200)return i.plant(e).value();for(var u=0,t=r?n[u].apply(this,t):e;++ud)return j=$(y,j),Br(t,n,Ar,l.placeholder,r,y,j,f,c,a-d);if(j=h?r:this,b=p?j[t]:t,d=y.length,f){x=y.length;for(var w=to(f.length,x),m=lr(y);w--;){var A=f[w];y[w]=Xr(A,x)?m[A]:T}}else v&&d>1&&y.reverse();return s&&d>c&&(y.length=c),this&&this!==Kt&&this instanceof l&&(b=g||xr(b)),b.apply(j,y)}var s=128&n,h=1&n,p=2&n,_=24&n,v=512&n,g=p?T:xr(t);return l}function Or(t,n){return function(r,e){return jn(r,t,n(e))}}function kr(t){return function(n,r){var e; -if(n===T&&r===T)return 0;if(n!==T&&(e=n),r!==T){if(e===T)return r;typeof n=="string"||typeof r=="string"?(n=Yn(n),r=Yn(r)):(n=Jn(n),r=Jn(r)),e=t(n,r)}return e}}function Er(t){return Me(function(n){return n=1==n.length&&yi(n[0])?l(n[0],O(Fr())):l(sn(n,1,Qr),O(Fr())),Me(function(e){var u=this;return t(n,function(t){return r(t,u,e)})})})}function Sr(t,n){n=n===T?" 
":Yn(n);var r=n.length;return 2>r?r?Pn(n,t):n:(r=Pn(n,Ku(t/N(n))),Wt.test(n)?ur(r.match(It),0,t).join(""):r.slice(0,t))}function Ir(t,n,e,u){ -function o(){for(var n=-1,c=arguments.length,a=-1,l=u.length,s=Array(l+c),h=this&&this!==Kt&&this instanceof o?f:t;++an?1:-1:nu(e)||0;var u=-1;r=Xu(Ku((r-n)/(e||1)),0);for(var o=Array(r);r--;)o[t?r:++u]=n,n+=e;return o}}function Wr(t){return function(n,r){return typeof n=="string"&&typeof r=="string"||(n=nu(n), -r=nu(r)),t(n,r)}}function Br(t,n,r,e,u,o,i,f,c,a){var l=8&n,s=l?i:T;i=l?T:i;var h=l?o:T;return o=l?T:o,n=(n|(l?32:64))&~(l?64:32),4&n||(n&=-4),n=[t,n,u,h,s,o,i,f,c,a],r=r.apply(T,n),re(t)&&Mo(r,n),r.placeholder=e,r}function Lr(t){var n=wu[t];return function(t,r){if(t=nu(t),r=to(Xe(r),292)){var e=(eu(t)+"e").split("e"),e=n(e[0]+"e"+(+e[1]+r)),e=(eu(e)+"e").split("e");return+(e[0]+"e"+(+e[1]-r))}return n(t)}}function Mr(t){return function(n){var r=qr(n);return"[object Map]"==r?U(n):"[object Set]"==r?F(n):A(n,t(n)); -}}function Cr(t,n,r,e,u,o,i,f){var c=2&n;if(!c&&typeof t!="function")throw new Au("Expected a function");var a=e?e.length:0;if(a||(n&=-97,e=u=T),i=i===T?i:Xu(Xe(i),0),f=f===T?f:Xe(f),a-=u?u.length:0,64&n){var l=e,s=u;e=u=T}var h=c?T:Ro(t);return o=[t,n,r,e,u,l,s,o,i,f],h&&(r=o[1],t=h[1],n=r|t,e=128==t&&8==r||128==t&&256==r&&h[8]>=o[7].length||384==t&&h[8]>=h[7].length&&8==r,131>n||e)&&(1&t&&(o[2]=h[2],n|=1&r?0:4),(r=h[3])&&(e=o[3],o[3]=e?cr(e,r,h[4]):r,o[4]=e?$(o[3],"__lodash_placeholder__"):h[4]), -(r=h[5])&&(e=o[5],o[5]=e?ar(e,r,h[6]):r,o[6]=e?$(o[5],"__lodash_placeholder__"):h[6]),(r=h[7])&&(o[7]=r),128&t&&(o[8]=null==o[8]?h[8]:to(o[8],h[8])),null==o[9]&&(o[9]=h[9]),o[0]=h[0],o[1]=n),t=o[0],n=o[1],r=o[2],e=o[3],u=o[4],f=o[9]=null==o[9]?c?0:t.length:Xu(o[9]-a,0),!f&&24&n&&(n&=-25),(h?So:Mo)(n&&1!=n?8==n||16==n?jr(t,n,f):32!=n&&33!=n||u.length?Ar.apply(T,o):Ir(t,n,r,e):dr(t,n,r),o)}function zr(t,n,r,e,u,o){var i=2&u,f=t.length,c=n.length;if(f!=c&&!(i&&c>f))return false;if(c=o.get(t))return c==n; -var c=-1,a=true,l=1&u?new Zt:T;for(o.set(t,n);++c-1&&0==t%1&&n>t}function te(t,n,r){if(!Ze(r))return false;var e=typeof n;return("number"==e?Ue(r)&&Xr(n,r.length):"string"==e&&n in r)?Ce(r[n],t):false}function ne(t,n){if(yi(t))return false;var r=typeof t;return"number"==r||"symbol"==r||"boolean"==r||null==t||Je(t)?true:ut.test(t)||!et.test(t)||null!=n&&t in Object(n)}function re(t){ -var n=$r(t),r=Ot[n];return typeof r=="function"&&n in Ut.prototype?t===r?true:(n=Ro(r),!!n&&t===n[0]):false}function ee(t){var n=t&&t.constructor;return t===(typeof n=="function"&&n.prototype||ku)}function ue(t,n){return function(r){return null==r?false:r[t]===n&&(n!==T||t in Object(r))}}function oe(t,n,r,e,u,o){return Ze(t)&&Ze(n)&&Bn(t,n,T,oe,o.set(n,t)),t}function ie(t,n){return 1==n.length?t:vn(t,Tn(n,0,-1))}function fe(t){if(typeof t=="string"||Je(t))return t;var n=t+"";return"0"==n&&1/t==-q?"-0":n}function ce(t){ -if(null!=t){try{return Ru.call(t)}catch(n){}return t+""}return""}function ae(t){if(t instanceof Ut)return t.clone();var n=new zt(t.__wrapped__,t.__chain__);return n.__actions__=lr(t.__actions__),n.__index__=t.__index__,n.__values__=t.__values__,n}function le(t,n,r){var e=t?t.length:0;return e?(n=r||n===T?1:Xe(n),Tn(t,0>n?0:n,e)):[]}function se(t,n,r){var e=t?t.length:0;return e?(n=r||n===T?1:Xe(n),n=e-n,Tn(t,0,0>n?0:n)):[]}function he(t,n,r){var e=t?t.length:0;return e?(r=null==r?0:Xe(r),0>r&&(r=Xu(e+r,0)), -g(t,Fr(n,3),r)):-1}function pe(t,n,r){var e=t?t.length:0;if(!e)return-1;var u=e-1;return 
r!==T&&(u=Xe(r),u=0>r?Xu(e+u,0):to(u,e-1)),g(t,Fr(n,3),u,true)}function _e(t){return t&&t.length?t[0]:T}function ve(t){var n=t?t.length:0;return n?t[n-1]:T}function ge(t,n){return t&&t.length&&n&&n.length?Dn(t,n):t}function de(t){return t?uo.call(t):t}function ye(t){if(!t||!t.length)return[];var n=0;return t=f(t,function(t){return $e(t)?(n=Xu(t.length,n),true):void 0}),m(n,function(n){return l(t,Un(n))})}function be(t,n){ -if(!t||!t.length)return[];var e=ye(t);return null==n?e:l(e,function(t){return r(n,T,t)})}function xe(t){return t=Ot(t),t.__chain__=true,t}function je(t,n){return n(t)}function we(){return this}function me(t,n){return(yi(t)?u:Ao)(t,Fr(n,3))}function Ae(t,n){return(yi(t)?o:Oo)(t,Fr(n,3))}function Oe(t,n){return(yi(t)?l:In)(t,Fr(n,3))}function ke(t,n,r){var e=-1,u=He(t),o=u.length,i=o-1;for(n=(r?te(t,n,r):n===T)?1:nn(Xe(n),0,o);++e=t&&(n=T),r}}function Re(t,n,r){return n=r?T:n,t=Cr(t,8,T,T,T,T,T,n),t.placeholder=Re.placeholder,t}function We(t,n,r){return n=r?T:n,t=Cr(t,16,T,T,T,T,T,n),t.placeholder=We.placeholder,t}function Be(t,n,r){function e(n){var r=c,e=a;return c=a=T,_=n,s=t.apply(e,r); -}function u(t){var r=t-p;return t-=_,p===T||r>=n||0>r||g&&t>=l}function o(){var t=Ee();if(u(t))return i(t);var r;r=t-_,t=n-(t-p),r=g?to(t,l-r):t,h=At(o,r)}function i(t){return h=T,d&&c?e(t):(c=a=T,s)}function f(){var t=Ee(),r=u(t);if(c=arguments,a=this,p=t,r){if(h===T)return _=t=p,h=At(o,n),v?e(t):s;if(g)return h=At(o,n),e(p)}return h===T&&(h=At(o,n)),s}var c,a,l,s,h,p,_=0,v=false,g=false,d=true;if(typeof t!="function")throw new Au("Expected a function");return n=nu(n)||0,Ze(r)&&(v=!!r.leading,l=(g="maxWait"in r)?Xu(nu(r.maxWait)||0,n):l, -d="trailing"in r?!!r.trailing:d),f.cancel=function(){_=0,c=p=a=h=T},f.flush=function(){return h===T?s:i(Ee())},f}function Le(t,n){function r(){var e=arguments,u=n?n.apply(this,e):e[0],o=r.cache;return o.has(u)?o.get(u):(e=t.apply(this,e),r.cache=o.set(u,e),e)}if(typeof t!="function"||n&&typeof n!="function")throw new Au("Expected a function");return r.cache=new(Le.Cache||Pt),r}function Me(t,n){if(typeof t!="function")throw new Au("Expected a function");return n=Xu(n===T?t.length-1:Xe(n),0),function(){ -for(var e=arguments,u=-1,o=Xu(e.length-n,0),i=Array(o);++u-1&&0==t%1&&9007199254740991>=t}function Ze(t){var n=typeof t;return!!t&&("object"==n||"function"==n)}function Te(t){return!!t&&typeof t=="object"}function qe(t){return typeof t=="number"||Te(t)&&"[object Number]"==Mu.call(t); -}function Ve(t){return!Te(t)||"[object Object]"!=Mu.call(t)||C(t)?false:(t=Ju(Object(t)),null===t?true:(t=Wu.call(t,"constructor")&&t.constructor,typeof t=="function"&&t instanceof t&&Ru.call(t)==Lu))}function Ke(t){return Ze(t)&&"[object RegExp]"==Mu.call(t)}function Ge(t){return typeof t=="string"||!yi(t)&&Te(t)&&"[object String]"==Mu.call(t)}function Je(t){return typeof t=="symbol"||Te(t)&&"[object Symbol]"==Mu.call(t)}function Ye(t){return Te(t)&&Pe(t.length)&&!!Mt[Mu.call(t)]}function He(t){if(!t)return[]; -if(Ue(t))return Ge(t)?t.match(It):lr(t);if(Zu&&t[Zu])return z(t[Zu]());var n=qr(t);return("[object Map]"==n?U:"[object Set]"==n?D:cu)(t)}function Qe(t){return t?(t=nu(t),t===q||t===-q?1.7976931348623157e308*(0>t?-1:1):t===t?t:0):0===t?t:0}function Xe(t){t=Qe(t);var n=t%1;return t===t?n?t-n:t:0}function tu(t){return t?nn(Xe(t),0,4294967295):0}function nu(t){if(typeof t=="number")return t;if(Je(t))return V;if(Ze(t)&&(t=Fe(t.valueOf)?t.valueOf():t,t=Ze(t)?t+"":t),typeof t!="string")return 0===t?t:+t; -t=t.replace(ct,"");var n=dt.test(t);return 
n||bt.test(t)?Nt(t.slice(2),n?2:8):gt.test(t)?V:+t}function ru(t){return sr(t,fu(t))}function eu(t){return null==t?"":Yn(t)}function uu(t,n,r){return t=null==t?T:vn(t,n),t===T?r:t}function ou(t,n){return null!=t&&Vr(t,n,bn)}function iu(t){var n=ee(t);if(!n&&!Ue(t))return Qu(Object(t));var r,e=Yr(t),u=!!e,e=e||[],o=e.length;for(r in t)!yn(t,r)||u&&("length"==r||Xr(r,o))||n&&"constructor"==r||e.push(r);return e}function fu(t){for(var n=-1,r=ee(t),e=En(t),u=e.length,o=Yr(t),i=!!o,o=o||[],f=o.length;++nt?false:(t==n.length-1?n.pop():Vu.call(n,t,1),true)},Dt.prototype.get=function(t){ -var n=this.__data__;return t=Ht(n,t),0>t?T:n[t][1]},Dt.prototype.has=function(t){return-1e?r.push([t,n]):r[e][1]=n,this},Pt.prototype.clear=function(){this.__data__={hash:new $t,map:new(fo||Dt),string:new $t}},Pt.prototype["delete"]=function(t){return Nr(this,t)["delete"](t)},Pt.prototype.get=function(t){return Nr(this,t).get(t)},Pt.prototype.has=function(t){return Nr(this,t).has(t)},Pt.prototype.set=function(t,n){ -return Nr(this,t).set(t,n),this},Zt.prototype.add=Zt.prototype.push=function(t){return this.__data__.set(t,"__lodash_hash_undefined__"),this},Zt.prototype.has=function(t){return this.__data__.has(t)},qt.prototype.clear=function(){this.__data__=new Dt},qt.prototype["delete"]=function(t){return this.__data__["delete"](t)},qt.prototype.get=function(t){return this.__data__.get(t)},qt.prototype.has=function(t){return this.__data__.has(t)},qt.prototype.set=function(t,n){var r=this.__data__;return r instanceof Dt&&200==r.__data__.length&&(r=this.__data__=new Pt(r.__data__)), -r.set(t,n),this};var Ao=vr(hn),Oo=vr(pn,true),ko=gr(),Eo=gr(true);Nu&&!qu.call({valueOf:1},"valueOf")&&(En=function(t){return z(Nu(t))});var So=ho?function(t,n){return ho.set(t,n),t}:pu,Io=ao&&1/D(new ao([,-0]))[1]==q?function(t){return new ao(t)}:gu,Ro=ho?function(t){return ho.get(t)}:gu,Wo=Un("length");Pu||(Tr=yu);var Bo=Pu?function(t){for(var n=[];t;)s(n,Tr(t)),t=Ju(Object(t));return n}:Tr;(io&&"[object DataView]"!=qr(new io(new ArrayBuffer(1)))||fo&&"[object Map]"!=qr(new fo)||co&&"[object Promise]"!=qr(co.resolve())||ao&&"[object Set]"!=qr(new ao)||lo&&"[object WeakMap]"!=qr(new lo))&&(qr=function(t){ -var n=Mu.call(t);if(t=(t="[object Object]"==n?t.constructor:T)?ce(t):T)switch(t){case vo:return"[object DataView]";case go:return"[object Map]";case yo:return"[object Promise]";case bo:return"[object Set]";case xo:return"[object WeakMap]"}return n});var Lo=Su?Fe:bu,Mo=function(){var t=0,n=0;return function(r,e){var u=Ee(),o=16-(u-n);if(n=u,o>0){if(150<=++t)return r}else t=0;return So(r,e)}}(),Co=Le(function(t){var n=[];return eu(t).replace(ot,function(t,r,e,u){n.push(e?u.replace(ht,"$1"):r||t)}), -n}),zo=Me(function(t,n){return $e(t)?fn(t,sn(n,1,$e,true)):[]}),Uo=Me(function(t,n){var r=ve(n);return $e(r)&&(r=T),$e(t)?fn(t,sn(n,1,$e,true),Fr(r)):[]}),$o=Me(function(t,n){var r=ve(n);return $e(r)&&(r=T),$e(t)?fn(t,sn(n,1,$e,true),T,r):[]}),Do=Me(function(t){var n=l(t,rr);return n.length&&n[0]===t[0]?xn(n):[]}),Fo=Me(function(t){var n=ve(t),r=l(t,rr);return n===ve(r)?n=T:r.pop(),r.length&&r[0]===t[0]?xn(r,Fr(n)):[]}),No=Me(function(t){var n=ve(t),r=l(t,rr);return n===ve(r)?n=T:r.pop(),r.length&&r[0]===t[0]?xn(r,T,n):[]; -}),Po=Me(ge),Zo=Me(function(t,n){n=sn(n,1);var r=t?t.length:0,e=tn(t,n);return Fn(t,l(n,function(t){return Xr(t,r)?+t:t}).sort(fr)),e}),To=Me(function(t){return Hn(sn(t,1,$e,true))}),qo=Me(function(t){var n=ve(t);return $e(n)&&(n=T),Hn(sn(t,1,$e,true),Fr(n))}),Vo=Me(function(t){var n=ve(t);return 
$e(n)&&(n=T),Hn(sn(t,1,$e,true),T,n)}),Ko=Me(function(t,n){return $e(t)?fn(t,n):[]}),Go=Me(function(t){return tr(f(t,$e))}),Jo=Me(function(t){var n=ve(t);return $e(n)&&(n=T),tr(f(t,$e),Fr(n))}),Yo=Me(function(t){ -var n=ve(t);return $e(n)&&(n=T),tr(f(t,$e),T,n)}),Ho=Me(ye),Qo=Me(function(t){var n=t.length,n=n>1?t[n-1]:T,n=typeof n=="function"?(t.pop(),n):T;return be(t,n)}),Xo=Me(function(t){function n(n){return tn(n,t)}t=sn(t,1);var r=t.length,e=r?t[0]:0,u=this.__wrapped__;return!(r>1||this.__actions__.length)&&u instanceof Ut&&Xr(e)?(u=u.slice(e,+e+(r?1:0)),u.__actions__.push({func:je,args:[n],thisArg:T}),new zt(u,this.__chain__).thru(function(t){return r&&!t.length&&t.push(T),t})):this.thru(n)}),ti=pr(function(t,n,r){ -Wu.call(t,r)?++t[r]:t[r]=1}),ni=wr(he),ri=wr(pe),ei=pr(function(t,n,r){Wu.call(t,r)?t[r].push(n):t[r]=[n]}),ui=Me(function(t,n,e){var u=-1,o=typeof n=="function",i=ne(n),f=Ue(t)?Array(t.length):[];return Ao(t,function(t){var c=o?n:i&&null!=t?t[n]:T;f[++u]=c?r(c,t,e):wn(t,n,e)}),f}),oi=pr(function(t,n,r){t[r]=n}),ii=pr(function(t,n,r){t[r?0:1].push(n)},function(){return[[],[]]}),fi=Me(function(t,n){if(null==t)return[];var r=n.length;return r>1&&te(t,n[0],n[1])?n=[]:r>2&&te(n[0],n[1],n[2])&&(n=[n[0]]), -n=1==n.length&&yi(n[0])?n[0]:sn(n,1,Qr),Mn(t,n,[])}),ci=Me(function(t,n,r){var e=1;if(r.length)var u=$(r,Dr(ci)),e=32|e;return Cr(t,e,n,r,u)}),ai=Me(function(t,n,r){var e=3;if(r.length)var u=$(r,Dr(ai)),e=32|e;return Cr(n,e,t,r,u)}),li=Me(function(t,n){return on(t,1,n)}),si=Me(function(t,n,r){return on(t,nu(n)||0,r)});Le.Cache=Pt;var hi=Me(function(t,n){n=1==n.length&&yi(n[0])?l(n[0],O(Fr())):l(sn(n,1,Qr),O(Fr()));var e=n.length;return Me(function(u){for(var o=-1,i=to(u.length,e);++o=n}),yi=Array.isArray,bi=Uu?function(t){return t instanceof Uu}:bu,xi=Wr(Sn),ji=Wr(function(t,n){return n>=t}),wi=_r(function(t,n){if(po||ee(n)||Ue(n))sr(n,iu(n),t);else for(var r in n)Wu.call(n,r)&&Yt(t,r,n[r])}),mi=_r(function(t,n){if(po||ee(n)||Ue(n))sr(n,fu(n),t);else for(var r in n)Yt(t,r,n[r]); -}),Ai=_r(function(t,n,r,e){sr(n,fu(n),t,e)}),Oi=_r(function(t,n,r,e){sr(n,iu(n),t,e)}),ki=Me(function(t,n){return tn(t,sn(n,1))}),Ei=Me(function(t){return t.push(T,Vt),r(Ai,T,t)}),Si=Me(function(t){return t.push(T,oe),r(Li,T,t)}),Ii=Or(function(t,n,r){t[n]=r},hu(pu)),Ri=Or(function(t,n,r){Wu.call(t,n)?t[n].push(r):t[n]=[r]},Fr),Wi=Me(wn),Bi=_r(function(t,n,r){Bn(t,n,r)}),Li=_r(function(t,n,r,e){Bn(t,n,r,e)}),Mi=Me(function(t,n){return null==t?{}:(n=l(sn(n,1),fe),Cn(t,fn(gn(t,fu,Bo),n)))}),Ci=Me(function(t,n){ -return null==t?{}:Cn(t,l(sn(n,1),fe))}),zi=Mr(iu),Ui=Mr(fu),$i=br(function(t,n,r){return n=n.toLowerCase(),t+(r?au(n):n)}),Di=br(function(t,n,r){return t+(r?"-":"")+n.toLowerCase()}),Fi=br(function(t,n,r){return t+(r?" ":"")+n.toLowerCase()}),Ni=yr("toLowerCase"),Pi=br(function(t,n,r){return t+(r?"_":"")+n.toLowerCase()}),Zi=br(function(t,n,r){return t+(r?" ":"")+qi(n)}),Ti=br(function(t,n,r){return t+(r?" 
":"")+n.toUpperCase()}),qi=yr("toUpperCase"),Vi=Me(function(t,n){try{return r(t,T,n)}catch(e){ -return De(e)?e:new ju(e)}}),Ki=Me(function(t,n){return u(sn(n,1),function(n){n=fe(n),t[n]=ci(t[n],t)}),t}),Gi=mr(),Ji=mr(true),Yi=Me(function(t,n){return function(r){return wn(r,t,n)}}),Hi=Me(function(t,n){return function(r){return wn(t,r,n)}}),Qi=Er(l),Xi=Er(i),tf=Er(_),nf=Rr(),rf=Rr(true),ef=kr(function(t,n){return t+n}),uf=Lr("ceil"),of=kr(function(t,n){return t/n}),ff=Lr("floor"),cf=kr(function(t,n){return t*n}),af=Lr("round"),lf=kr(function(t,n){return t-n});return Ot.after=function(t,n){if(typeof n!="function")throw new Au("Expected a function"); -return t=Xe(t),function(){return 1>--t?n.apply(this,arguments):void 0}},Ot.ary=Se,Ot.assign=wi,Ot.assignIn=mi,Ot.assignInWith=Ai,Ot.assignWith=Oi,Ot.at=ki,Ot.before=Ie,Ot.bind=ci,Ot.bindAll=Ki,Ot.bindKey=ai,Ot.castArray=function(){if(!arguments.length)return[];var t=arguments[0];return yi(t)?t:[t]},Ot.chain=xe,Ot.chunk=function(t,n,r){if(n=(r?te(t,n,r):n===T)?1:Xu(Xe(n),0),r=t?t.length:0,!r||1>n)return[];for(var e=0,u=0,o=Array(Ku(r/n));r>e;)o[u++]=Tn(t,e,e+=n);return o},Ot.compact=function(t){for(var n=-1,r=t?t.length:0,e=0,u=[];++nr&&(r=-r>u?0:u+r),e=e===T||e>u?u:Xe(e),0>e&&(e+=u),e=r>e?0:tu(e);e>r;)t[r++]=n;return t},Ot.filter=function(t,n){return(yi(t)?f:ln)(t,Fr(n,3))},Ot.flatMap=function(t,n){return sn(Oe(t,n),1)},Ot.flatMapDeep=function(t,n){return sn(Oe(t,n),q)},Ot.flatMapDepth=function(t,n,r){return r=r===T?1:Xe(r),sn(Oe(t,n),r)},Ot.flatten=function(t){return t&&t.length?sn(t,1):[]},Ot.flattenDeep=function(t){return t&&t.length?sn(t,q):[]},Ot.flattenDepth=function(t,n){return t&&t.length?(n=n===T?1:Xe(n),sn(t,n)):[]; -},Ot.flip=function(t){return Cr(t,512)},Ot.flow=Gi,Ot.flowRight=Ji,Ot.fromPairs=function(t){for(var n=-1,r=t?t.length:0,e={};++n>>0,r?(t=eu(t))&&(typeof n=="string"||null!=n&&!Ke(n))&&(n=Yn(n),""==n&&Wt.test(t))?ur(t.match(It),0,r):oo.call(t,n,r):[]},Ot.spread=function(t,n){if(typeof t!="function")throw new Au("Expected a function");return n=n===T?0:Xu(Xe(n),0),Me(function(e){var u=e[n];return e=ur(e,0,n),u&&s(e,u),r(t,this,e)})},Ot.tail=function(t){return le(t,1)},Ot.take=function(t,n,r){return t&&t.length?(n=r||n===T?1:Xe(n), -Tn(t,0,0>n?0:n)):[]},Ot.takeRight=function(t,n,r){var e=t?t.length:0;return e?(n=r||n===T?1:Xe(n),n=e-n,Tn(t,0>n?0:n,e)):[]},Ot.takeRightWhile=function(t,n){return t&&t.length?Qn(t,Fr(n,3),false,true):[]},Ot.takeWhile=function(t,n){return t&&t.length?Qn(t,Fr(n,3)):[]},Ot.tap=function(t,n){return n(t),t},Ot.throttle=function(t,n,r){var e=true,u=true;if(typeof t!="function")throw new Au("Expected a function");return Ze(r)&&(e="leading"in r?!!r.leading:e,u="trailing"in r?!!r.trailing:u),Be(t,n,{leading:e,maxWait:n, -trailing:u})},Ot.thru=je,Ot.toArray=He,Ot.toPairs=zi,Ot.toPairsIn=Ui,Ot.toPath=function(t){return yi(t)?l(t,fe):Je(t)?[t]:lr(Co(t))},Ot.toPlainObject=ru,Ot.transform=function(t,n,r){var e=yi(t)||Ye(t);if(n=Fr(n,4),null==r)if(e||Ze(t)){var o=t.constructor;r=e?yi(t)?new o:[]:Fe(o)?un(Ju(Object(t))):{}}else r={};return(e?u:hn)(t,function(t,e,u){return n(r,t,e,u)}),r},Ot.unary=function(t){return Se(t,1)},Ot.union=To,Ot.unionBy=qo,Ot.unionWith=Vo,Ot.uniq=function(t){return t&&t.length?Hn(t):[]},Ot.uniqBy=function(t,n){ -return t&&t.length?Hn(t,Fr(n)):[]},Ot.uniqWith=function(t,n){return t&&t.length?Hn(t,T,n):[]},Ot.unset=function(t,n){var r;if(null==t)r=true;else{r=t;var e=n,e=ne(e,r)?[e]:er(e);r=ie(r,e),e=fe(ve(e)),r=!(null!=r&&yn(r,e))||delete r[e]}return 
r},Ot.unzip=ye,Ot.unzipWith=be,Ot.update=function(t,n,r){return null==t?t:Zn(t,n,(typeof r=="function"?r:pu)(vn(t,n)),void 0)},Ot.updateWith=function(t,n,r,e){return e=typeof e=="function"?e:T,null!=t&&(t=Zn(t,n,(typeof r=="function"?r:pu)(vn(t,n)),e)),t},Ot.values=cu, -Ot.valuesIn=function(t){return null==t?[]:k(t,fu(t))},Ot.without=Ko,Ot.words=su,Ot.wrap=function(t,n){return n=null==n?pu:n,pi(n,t)},Ot.xor=Go,Ot.xorBy=Jo,Ot.xorWith=Yo,Ot.zip=Ho,Ot.zipObject=function(t,n){return nr(t||[],n||[],Yt)},Ot.zipObjectDeep=function(t,n){return nr(t||[],n||[],Zn)},Ot.zipWith=Qo,Ot.entries=zi,Ot.entriesIn=Ui,Ot.extend=mi,Ot.extendWith=Ai,vu(Ot,Ot),Ot.add=ef,Ot.attempt=Vi,Ot.camelCase=$i,Ot.capitalize=au,Ot.ceil=uf,Ot.clamp=function(t,n,r){return r===T&&(r=n,n=T),r!==T&&(r=nu(r), -r=r===r?r:0),n!==T&&(n=nu(n),n=n===n?n:0),nn(nu(t),n,r)},Ot.clone=function(t){return rn(t,false,true)},Ot.cloneDeep=function(t){return rn(t,true,true)},Ot.cloneDeepWith=function(t,n){return rn(t,true,true,n)},Ot.cloneWith=function(t,n){return rn(t,false,true,n)},Ot.deburr=lu,Ot.divide=of,Ot.endsWith=function(t,n,r){t=eu(t),n=Yn(n);var e=t.length;return r=r===T?e:nn(Xe(r),0,e),r-=n.length,r>=0&&t.indexOf(n,r)==r},Ot.eq=Ce,Ot.escape=function(t){return(t=eu(t))&&X.test(t)?t.replace(H,B):t},Ot.escapeRegExp=function(t){ -return(t=eu(t))&&ft.test(t)?t.replace(it,"\\$&"):t},Ot.every=function(t,n,r){var e=yi(t)?i:cn;return r&&te(t,n,r)&&(n=T),e(t,Fr(n,3))},Ot.find=ni,Ot.findIndex=he,Ot.findKey=function(t,n){return v(t,Fr(n,3),hn)},Ot.findLast=ri,Ot.findLastIndex=pe,Ot.findLastKey=function(t,n){return v(t,Fr(n,3),pn)},Ot.floor=ff,Ot.forEach=me,Ot.forEachRight=Ae,Ot.forIn=function(t,n){return null==t?t:ko(t,Fr(n,3),fu)},Ot.forInRight=function(t,n){return null==t?t:Eo(t,Fr(n,3),fu)},Ot.forOwn=function(t,n){return t&&hn(t,Fr(n,3)); -},Ot.forOwnRight=function(t,n){return t&&pn(t,Fr(n,3))},Ot.get=uu,Ot.gt=gi,Ot.gte=di,Ot.has=function(t,n){return null!=t&&Vr(t,n,yn)},Ot.hasIn=ou,Ot.head=_e,Ot.identity=pu,Ot.includes=function(t,n,r,e){return t=Ue(t)?t:cu(t),r=r&&!e?Xe(r):0,e=t.length,0>r&&(r=Xu(e+r,0)),Ge(t)?e>=r&&-1r&&(r=Xu(e+r,0)),d(t,n,r)):-1},Ot.inRange=function(t,n,r){return n=nu(n)||0,r===T?(r=n,n=0):r=nu(r)||0,t=nu(t), -t>=to(n,r)&&t=-9007199254740991&&9007199254740991>=t; -},Ot.isSet=function(t){return Te(t)&&"[object Set]"==qr(t)},Ot.isString=Ge,Ot.isSymbol=Je,Ot.isTypedArray=Ye,Ot.isUndefined=function(t){return t===T},Ot.isWeakMap=function(t){return Te(t)&&"[object WeakMap]"==qr(t)},Ot.isWeakSet=function(t){return Te(t)&&"[object WeakSet]"==Mu.call(t)},Ot.join=function(t,n){return t?Hu.call(t,n):""},Ot.kebabCase=Di,Ot.last=ve,Ot.lastIndexOf=function(t,n,r){var e=t?t.length:0;if(!e)return-1;var u=e;if(r!==T&&(u=Xe(r),u=(0>u?Xu(e+u,0):to(u,e-1))+1),n!==n)return M(t,u-1,true); -for(;u--;)if(t[u]===n)return u;return-1},Ot.lowerCase=Fi,Ot.lowerFirst=Ni,Ot.lt=xi,Ot.lte=ji,Ot.max=function(t){return t&&t.length?an(t,pu,dn):T},Ot.maxBy=function(t,n){return t&&t.length?an(t,Fr(n),dn):T},Ot.mean=function(t){return b(t,pu)},Ot.meanBy=function(t,n){return b(t,Fr(n))},Ot.min=function(t){return t&&t.length?an(t,pu,Sn):T},Ot.minBy=function(t,n){return t&&t.length?an(t,Fr(n),Sn):T},Ot.stubArray=yu,Ot.stubFalse=bu,Ot.stubObject=function(){return{}},Ot.stubString=function(){return""},Ot.stubTrue=function(){ -return true},Ot.multiply=cf,Ot.nth=function(t,n){return t&&t.length?Ln(t,Xe(n)):T},Ot.noConflict=function(){return Kt._===this&&(Kt._=Cu),this},Ot.noop=gu,Ot.now=Ee,Ot.pad=function(t,n,r){t=eu(t);var 
e=(n=Xe(n))?N(t):0;return!n||e>=n?t:(n=(n-e)/2,Sr(Gu(n),r)+t+Sr(Ku(n),r))},Ot.padEnd=function(t,n,r){t=eu(t);var e=(n=Xe(n))?N(t):0;return n&&n>e?t+Sr(n-e,r):t},Ot.padStart=function(t,n,r){t=eu(t);var e=(n=Xe(n))?N(t):0;return n&&n>e?Sr(n-e,r)+t:t},Ot.parseInt=function(t,n,r){return r||null==n?n=0:n&&(n=+n), -t=eu(t).replace(ct,""),no(t,n||(vt.test(t)?16:10))},Ot.random=function(t,n,r){if(r&&typeof r!="boolean"&&te(t,n,r)&&(n=r=T),r===T&&(typeof n=="boolean"?(r=n,n=T):typeof t=="boolean"&&(r=t,t=T)),t===T&&n===T?(t=0,n=1):(t=nu(t)||0,n===T?(n=t,t=0):n=nu(n)||0),t>n){var e=t;t=n,n=e}return r||t%1||n%1?(r=ro(),to(t+r*(n-t+Ft("1e-"+((r+"").length-1))),n)):Nn(t,n)},Ot.reduce=function(t,n,r){var e=yi(t)?h:x,u=3>arguments.length;return e(t,Fr(n,4),r,u,Ao)},Ot.reduceRight=function(t,n,r){var e=yi(t)?p:x,u=3>arguments.length; -return e(t,Fr(n,4),r,u,Oo)},Ot.repeat=function(t,n,r){return n=(r?te(t,n,r):n===T)?1:Xe(n),Pn(eu(t),n)},Ot.replace=function(){var t=arguments,n=eu(t[0]);return 3>t.length?n:eo.call(n,t[1],t[2])},Ot.result=function(t,n,r){n=ne(n,t)?[n]:er(n);var e=-1,u=n.length;for(u||(t=T,u=1);++e0?t[Nn(0,n-1)]:T},Ot.size=function(t){if(null==t)return 0; -if(Ue(t)){var n=t.length;return n&&Ge(t)?N(t):n}return Te(t)&&(n=qr(t),"[object Map]"==n||"[object Set]"==n)?t.size:iu(t).length},Ot.snakeCase=Pi,Ot.some=function(t,n,r){var e=yi(t)?_:qn;return r&&te(t,n,r)&&(n=T),e(t,Fr(n,3))},Ot.sortedIndex=function(t,n){return Vn(t,n)},Ot.sortedIndexBy=function(t,n,r){return Kn(t,n,Fr(r))},Ot.sortedIndexOf=function(t,n){var r=t?t.length:0;if(r){var e=Vn(t,n);if(r>e&&Ce(t[e],n))return e}return-1},Ot.sortedLastIndex=function(t,n){return Vn(t,n,true)},Ot.sortedLastIndexBy=function(t,n,r){ -return Kn(t,n,Fr(r),true)},Ot.sortedLastIndexOf=function(t,n){if(t&&t.length){var r=Vn(t,n,true)-1;if(Ce(t[r],n))return r}return-1},Ot.startCase=Zi,Ot.startsWith=function(t,n,r){return t=eu(t),r=nn(Xe(r),0,t.length),t.lastIndexOf(Yn(n),r)==r},Ot.subtract=lf,Ot.sum=function(t){return t&&t.length?w(t,pu):0},Ot.sumBy=function(t,n){return t&&t.length?w(t,Fr(n)):0},Ot.template=function(t,n,r){var e=Ot.templateSettings;r&&te(t,n,r)&&(n=T),t=eu(t),n=Ai({},n,e,Vt),r=Ai({},n.imports,e.imports,Vt);var u,o,i=iu(r),f=k(r,i),c=0; -r=n.interpolate||wt;var a="__p+='";r=mu((n.escape||wt).source+"|"+r.source+"|"+(r===rt?pt:wt).source+"|"+(n.evaluate||wt).source+"|$","g");var l="sourceURL"in n?"//# sourceURL="+n.sourceURL+"\n":"";if(t.replace(r,function(n,r,e,i,f,l){return e||(e=i),a+=t.slice(c,l).replace(mt,L),r&&(u=true,a+="'+__e("+r+")+'"),f&&(o=true,a+="';"+f+";\n__p+='"),e&&(a+="'+((__t=("+e+"))==null?'':__t)+'"),c=l+n.length,n}),a+="';",(n=n.variable)||(a="with(obj){"+a+"}"),a=(o?a.replace(K,""):a).replace(G,"$1").replace(J,"$1;"), -a="function("+(n||"obj")+"){"+(n?"":"obj||(obj={});")+"var __t,__p=''"+(u?",__e=_.escape":"")+(o?",__j=Array.prototype.join;function print(){__p+=__j.call(arguments,'')}":";")+a+"return __p}",n=Vi(function(){return Function(i,l+"return "+a).apply(T,f)}),n.source=a,De(n))throw n;return n},Ot.times=function(t,n){if(t=Xe(t),1>t||t>9007199254740991)return[];var r=4294967295,e=to(t,4294967295);for(n=Fr(n),t-=4294967295,e=m(e,n);++r=o)return t;if(o=r-N(e),1>o)return e;if(r=i?ur(i,0,o).join(""):t.slice(0,o),u===T)return r+e;if(i&&(o+=r.length-o),Ke(u)){if(t.slice(o).search(u)){var f=r;for(u.global||(u=mu(u.source,eu(_t.exec(u))+"g")), -u.lastIndex=0;i=u.exec(f);)var c=i.index;r=r.slice(0,c===T?o:c)}}else t.indexOf(Yn(u),o)!=o&&(u=r.lastIndexOf(u),u>-1&&(r=r.slice(0,u)));return 
r+e},Ot.unescape=function(t){return(t=eu(t))&&Q.test(t)?t.replace(Y,P):t},Ot.uniqueId=function(t){var n=++Bu;return eu(t)+n},Ot.upperCase=Ti,Ot.upperFirst=qi,Ot.each=me,Ot.eachRight=Ae,Ot.first=_e,vu(Ot,function(){var t={};return hn(Ot,function(n,r){Wu.call(Ot.prototype,r)||(t[r]=n)}),t}(),{chain:false}),Ot.VERSION="4.13.1",u("bind bindKey curry curryRight partial partialRight".split(" "),function(t){ -Ot[t].placeholder=Ot}),u(["drop","take"],function(t,n){Ut.prototype[t]=function(r){var e=this.__filtered__;if(e&&!n)return new Ut(this);r=r===T?1:Xu(Xe(r),0);var u=this.clone();return e?u.__takeCount__=to(r,u.__takeCount__):u.__views__.push({size:to(r,4294967295),type:t+(0>u.__dir__?"Right":"")}),u},Ut.prototype[t+"Right"]=function(n){return this.reverse()[t](n).reverse()}}),u(["filter","map","takeWhile"],function(t,n){var r=n+1,e=1==r||3==r;Ut.prototype[t]=function(t){var n=this.clone();return n.__iteratees__.push({ -iteratee:Fr(t,3),type:r}),n.__filtered__=n.__filtered__||e,n}}),u(["head","last"],function(t,n){var r="take"+(n?"Right":"");Ut.prototype[t]=function(){return this[r](1).value()[0]}}),u(["initial","tail"],function(t,n){var r="drop"+(n?"":"Right");Ut.prototype[t]=function(){return this.__filtered__?new Ut(this):this[r](1)}}),Ut.prototype.compact=function(){return this.filter(pu)},Ut.prototype.find=function(t){return this.filter(t).head()},Ut.prototype.findLast=function(t){return this.reverse().find(t); -},Ut.prototype.invokeMap=Me(function(t,n){return typeof t=="function"?new Ut(this):this.map(function(r){return wn(r,t,n)})}),Ut.prototype.reject=function(t){return t=Fr(t,3),this.filter(function(n){return!t(n)})},Ut.prototype.slice=function(t,n){t=Xe(t);var r=this;return r.__filtered__&&(t>0||0>n)?new Ut(r):(0>t?r=r.takeRight(-t):t&&(r=r.drop(t)),n!==T&&(n=Xe(n),r=0>n?r.dropRight(-n):r.take(n-t)),r)},Ut.prototype.takeRightWhile=function(t){return this.reverse().takeWhile(t).reverse()},Ut.prototype.toArray=function(){ -return this.take(4294967295)},hn(Ut.prototype,function(t,n){var r=/^(?:filter|find|map|reject)|While$/.test(n),e=/^(?:head|last)$/.test(n),u=Ot[e?"take"+("last"==n?"Right":""):n],o=e||/^find/.test(n);u&&(Ot.prototype[n]=function(){function n(t){return t=u.apply(Ot,s([t],f)),e&&h?t[0]:t}var i=this.__wrapped__,f=e?[1]:arguments,c=i instanceof Ut,a=f[0],l=c||yi(i);l&&r&&typeof a=="function"&&1!=a.length&&(c=l=false);var h=this.__chain__,p=!!this.__actions__.length,a=o&&!h,c=c&&!p;return!o&&l?(i=c?i:new Ut(this), -i=t.apply(i,f),i.__actions__.push({func:je,args:[n],thisArg:T}),new zt(i,h)):a&&c?t.apply(this,f):(i=this.thru(n),a?e?i.value()[0]:i.value():i)})}),u("pop push shift sort splice unshift".split(" "),function(t){var n=Ou[t],r=/^(?:push|sort|unshift)$/.test(t)?"tap":"thru",e=/^(?:pop|shift)$/.test(t);Ot.prototype[t]=function(){var t=arguments;if(e&&!this.__chain__){var u=this.value();return n.apply(yi(u)?u:[],t)}return this[r](function(r){return n.apply(yi(r)?r:[],t)})}}),hn(Ut.prototype,function(t,n){ -var r=Ot[n];if(r){var e=r.name+"";(_o[e]||(_o[e]=[])).push({name:n,func:r})}}),_o[Ar(T,2).name]=[{name:"wrapper",func:T}],Ut.prototype.clone=function(){var t=new Ut(this.__wrapped__);return t.__actions__=lr(this.__actions__),t.__dir__=this.__dir__,t.__filtered__=this.__filtered__,t.__iteratees__=lr(this.__iteratees__),t.__takeCount__=this.__takeCount__,t.__views__=lr(this.__views__),t},Ut.prototype.reverse=function(){if(this.__filtered__){var t=new Ut(this);t.__dir__=-1,t.__filtered__=true}else t=this.clone(), -t.__dir__*=-1;return 
t},Ut.prototype.value=function(){var t,n=this.__wrapped__.value(),r=this.__dir__,e=yi(n),u=0>r,o=e?n.length:0;t=o;for(var i=this.__views__,f=0,c=-1,a=i.length;++co||o==t&&a==t)return Xn(n,this.__actions__);e=[]; -t:for(;t--&&a>c;){for(u+=r,o=-1,l=n[u];++o=this.__values__.length,n=t?T:this.__values__[this.__index__++];return{done:t,value:n}},Ot.prototype.plant=function(t){ -for(var n,r=this;r instanceof kt;){var e=ae(r);e.__index__=0,e.__values__=T,n?u.__wrapped__=e:n=e;var u=e,r=r.__wrapped__}return u.__wrapped__=t,n},Ot.prototype.reverse=function(){var t=this.__wrapped__;return t instanceof Ut?(this.__actions__.length&&(t=new Ut(this)),t=t.reverse(),t.__actions__.push({func:je,args:[de],thisArg:T}),new zt(t,this.__chain__)):this.thru(de)},Ot.prototype.toJSON=Ot.prototype.valueOf=Ot.prototype.value=function(){return Xn(this.__wrapped__,this.__actions__)},Zu&&(Ot.prototype[Zu]=we), -Ot}var T,q=1/0,V=NaN,K=/\b__p\+='';/g,G=/\b(__p\+=)''\+/g,J=/(__e\(.*?\)|\b__t\))\+'';/g,Y=/&(?:amp|lt|gt|quot|#39|#96);/g,H=/[&<>"'`]/g,Q=RegExp(Y.source),X=RegExp(H.source),tt=/<%-([\s\S]+?)%>/g,nt=/<%([\s\S]+?)%>/g,rt=/<%=([\s\S]+?)%>/g,et=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,ut=/^\w*$/,ot=/[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(\.|\[\])(?:\4|$))/g,it=/[\\^$.*+?()[\]{}|]/g,ft=RegExp(it.source),ct=/^\s+|\s+$/g,at=/^\s+/,lt=/\s+$/,st=/[a-zA-Z0-9]+/g,ht=/\\(\\)?/g,pt=/\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g,_t=/\w*$/,vt=/^0x/i,gt=/^[-+]0x[0-9a-f]+$/i,dt=/^0b[01]+$/i,yt=/^\[object .+?Constructor\]$/,bt=/^0o[0-7]+$/i,xt=/^(?:0|[1-9]\d*)$/,jt=/[\xc0-\xd6\xd8-\xde\xdf-\xf6\xf8-\xff]/g,wt=/($^)/,mt=/['\n\r\u2028\u2029\\]/g,At="[\\ufe0e\\ufe0f]?(?:[\\u0300-\\u036f\\ufe20-\\ufe23\\u20d0-\\u20f0]|\\ud83c[\\udffb-\\udfff])?(?:\\u200d(?:[^\\ud800-\\udfff]|(?:\\ud83c[\\udde6-\\uddff]){2}|[\\ud800-\\udbff][\\udc00-\\udfff])[\\ufe0e\\ufe0f]?(?:[\\u0300-\\u036f\\ufe20-\\ufe23\\u20d0-\\u20f0]|\\ud83c[\\udffb-\\udfff])?)*",Ot="(?:[\\u2700-\\u27bf]|(?:\\ud83c[\\udde6-\\uddff]){2}|[\\ud800-\\udbff][\\udc00-\\udfff])"+At,kt="(?:[^\\ud800-\\udfff][\\u0300-\\u036f\\ufe20-\\ufe23\\u20d0-\\u20f0]?|[\\u0300-\\u036f\\ufe20-\\ufe23\\u20d0-\\u20f0]|(?:\\ud83c[\\udde6-\\uddff]){2}|[\\ud800-\\udbff][\\udc00-\\udfff]|[\\ud800-\\udfff])",Et=RegExp("['\u2019]","g"),St=RegExp("[\\u0300-\\u036f\\ufe20-\\ufe23\\u20d0-\\u20f0]","g"),It=RegExp("\\ud83c[\\udffb-\\udfff](?=\\ud83c[\\udffb-\\udfff])|"+kt+At,"g"),Rt=RegExp(["[A-Z\\xc0-\\xd6\\xd8-\\xde]?[a-z\\xdf-\\xf6\\xf8-\\xff]+(?:['\u2019](?:d|ll|m|re|s|t|ve))?(?=[\\xac\\xb1\\xd7\\xf7\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf\\u2000-\\u206f \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000]|[A-Z\\xc0-\\xd6\\xd8-\\xde]|$)|(?:[A-Z\\xc0-\\xd6\\xd8-\\xde]|[^\\ud800-\\udfff\\xac\\xb1\\xd7\\xf7\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf\\u2000-\\u206f \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000\\d+\\u2700-\\u27bfa-z\\xdf-\\xf6\\xf8-\\xffA-Z\\xc0-\\xd6\\xd8-\\xde])+(?:['\u2019](?:D|LL|M|RE|S|T|VE))?(?=[\\xac\\xb1\\xd7\\xf7\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf\\u2000-\\u206f 
\\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000]|[A-Z\\xc0-\\xd6\\xd8-\\xde](?:[a-z\\xdf-\\xf6\\xf8-\\xff]|[^\\ud800-\\udfff\\xac\\xb1\\xd7\\xf7\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf\\u2000-\\u206f \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000\\d+\\u2700-\\u27bfa-z\\xdf-\\xf6\\xf8-\\xffA-Z\\xc0-\\xd6\\xd8-\\xde])|$)|[A-Z\\xc0-\\xd6\\xd8-\\xde]?(?:[a-z\\xdf-\\xf6\\xf8-\\xff]|[^\\ud800-\\udfff\\xac\\xb1\\xd7\\xf7\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf\\u2000-\\u206f \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000\\d+\\u2700-\\u27bfa-z\\xdf-\\xf6\\xf8-\\xffA-Z\\xc0-\\xd6\\xd8-\\xde])+(?:['\u2019](?:d|ll|m|re|s|t|ve))?|[A-Z\\xc0-\\xd6\\xd8-\\xde]+(?:['\u2019](?:D|LL|M|RE|S|T|VE))?|\\d+",Ot].join("|"),"g"),Wt=RegExp("[\\u200d\\ud800-\\udfff\\u0300-\\u036f\\ufe20-\\ufe23\\u20d0-\\u20f0\\ufe0e\\ufe0f]"),Bt=/[a-z][A-Z]|[A-Z]{2,}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/,Lt="Array Buffer DataView Date Error Float32Array Float64Array Function Int8Array Int16Array Int32Array Map Math Object Promise Reflect RegExp Set String Symbol TypeError Uint8Array Uint8ClampedArray Uint16Array Uint32Array WeakMap _ isFinite parseInt setTimeout".split(" "),Mt={}; -Mt["[object Float32Array]"]=Mt["[object Float64Array]"]=Mt["[object Int8Array]"]=Mt["[object Int16Array]"]=Mt["[object Int32Array]"]=Mt["[object Uint8Array]"]=Mt["[object Uint8ClampedArray]"]=Mt["[object Uint16Array]"]=Mt["[object Uint32Array]"]=true,Mt["[object Arguments]"]=Mt["[object Array]"]=Mt["[object ArrayBuffer]"]=Mt["[object Boolean]"]=Mt["[object DataView]"]=Mt["[object Date]"]=Mt["[object Error]"]=Mt["[object Function]"]=Mt["[object Map]"]=Mt["[object Number]"]=Mt["[object Object]"]=Mt["[object RegExp]"]=Mt["[object Set]"]=Mt["[object String]"]=Mt["[object WeakMap]"]=false; -var Ct={};Ct["[object Arguments]"]=Ct["[object Array]"]=Ct["[object ArrayBuffer]"]=Ct["[object DataView]"]=Ct["[object Boolean]"]=Ct["[object Date]"]=Ct["[object Float32Array]"]=Ct["[object Float64Array]"]=Ct["[object Int8Array]"]=Ct["[object Int16Array]"]=Ct["[object Int32Array]"]=Ct["[object Map]"]=Ct["[object Number]"]=Ct["[object Object]"]=Ct["[object RegExp]"]=Ct["[object Set]"]=Ct["[object String]"]=Ct["[object Symbol]"]=Ct["[object Uint8Array]"]=Ct["[object Uint8ClampedArray]"]=Ct["[object Uint16Array]"]=Ct["[object Uint32Array]"]=true, -Ct["[object Error]"]=Ct["[object Function]"]=Ct["[object WeakMap]"]=false;var zt={"\xc0":"A","\xc1":"A","\xc2":"A","\xc3":"A","\xc4":"A","\xc5":"A","\xe0":"a","\xe1":"a","\xe2":"a","\xe3":"a","\xe4":"a","\xe5":"a","\xc7":"C","\xe7":"c","\xd0":"D","\xf0":"d","\xc8":"E","\xc9":"E","\xca":"E","\xcb":"E","\xe8":"e","\xe9":"e","\xea":"e","\xeb":"e","\xcc":"I","\xcd":"I","\xce":"I","\xcf":"I","\xec":"i","\xed":"i","\xee":"i","\xef":"i","\xd1":"N","\xf1":"n","\xd2":"O","\xd3":"O","\xd4":"O","\xd5":"O","\xd6":"O", 
-"\xd8":"O","\xf2":"o","\xf3":"o","\xf4":"o","\xf5":"o","\xf6":"o","\xf8":"o","\xd9":"U","\xda":"U","\xdb":"U","\xdc":"U","\xf9":"u","\xfa":"u","\xfb":"u","\xfc":"u","\xdd":"Y","\xfd":"y","\xff":"y","\xc6":"Ae","\xe6":"ae","\xde":"Th","\xfe":"th","\xdf":"ss"},Ut={"&":"&","<":"<",">":">",'"':""","'":"'","`":"`"},$t={"&":"&","<":"<",">":">",""":'"',"'":"'","`":"`"},Dt={"\\":"\\","'":"'","\n":"n","\r":"r","\u2028":"u2028","\u2029":"u2029"},Ft=parseFloat,Nt=parseInt,Pt=typeof exports=="object"&&exports,Zt=Pt&&typeof module=="object"&&module,Tt=Zt&&Zt.exports===Pt,qt=R(typeof self=="object"&&self),Vt=R(typeof this=="object"&&this),Kt=R(typeof global=="object"&&global)||qt||Vt||Function("return this")(),Gt=Z(); -(qt||{})._=Gt,typeof define=="function"&&typeof define.amd=="object"&&define.amd? define(function(){return Gt}):Zt?((Zt.exports=Gt)._=Gt,Pt._=Gt):Kt._=Gt}).call(this); diff --git a/static/js/lib/loglevel.min.js b/static/js/lib/loglevel.min.js deleted file mode 100644 index 404c43f646..0000000000 --- a/static/js/lib/loglevel.min.js +++ /dev/null @@ -1,2 +0,0 @@ -/*! loglevel - v1.4.0 - https://github.com/pimterry/loglevel - (c) 2015 Tim Perry - licensed MIT */ -!function(a,b){"use strict";"object"==typeof module&&module.exports&&"function"==typeof require?module.exports=b():"function"==typeof define&&"object"==typeof define.amd?define(b):a.log=b()}(this,function(){"use strict";function a(a){return typeof console===h?!1:void 0!==console[a]?b(console,a):void 0!==console.log?b(console,"log"):g}function b(a,b){var c=a[b];if("function"==typeof c.bind)return c.bind(a);try{return Function.prototype.bind.call(c,a)}catch(d){return function(){return Function.prototype.apply.apply(c,[a,arguments])}}}function c(a,b,c){return function(){typeof console!==h&&(d.call(this,b,c),this[a].apply(this,arguments))}}function d(a,b){for(var c=0;cc?g:this.methodFactory(d,a,b)}}function e(b,d,e){return a(b)||c.apply(this,arguments)}function f(a,b,c){function f(a){var b=(i[a]||"silent").toUpperCase();try{return void(window.localStorage[l]=b)}catch(c){}try{window.document.cookie=encodeURIComponent(l)+"="+b+";"}catch(c){}}function g(){var a;try{a=window.localStorage[l]}catch(b){}if(typeof a===h)try{var c=window.document.cookie,d=c.indexOf(encodeURIComponent(l)+"=");d&&(a=/^([^;]+)/.exec(c.slice(d))[1])}catch(b){}return void 0===k.levels[a]&&(a=void 0),a}var j,k=this,l="loglevel";a&&(l+=":"+a),k.levels={TRACE:0,DEBUG:1,INFO:2,WARN:3,ERROR:4,SILENT:5},k.methodFactory=c||e,k.getLevel=function(){return j},k.setLevel=function(b,c){if("string"==typeof b&&void 0!==k.levels[b.toUpperCase()]&&(b=k.levels[b.toUpperCase()]),!("number"==typeof b&&b>=0&&b<=k.levels.SILENT))throw"log.setLevel() called with invalid level: "+b;return j=b,c!==!1&&f(b),d.call(k,b,a),typeof console===h&&b= 0) { - return this.settings.strings.inPast; - } - - var seconds = Math.abs(distanceMillis) / 1000; - var minutes = seconds / 60; - var hours = minutes / 60; - var days = hours / 24; - var years = days / 365; - - function substitute(stringOrFunction, number) { - var string = $.isFunction(stringOrFunction) ? 
stringOrFunction(number, distanceMillis) : stringOrFunction; - var value = ($l.numbers && $l.numbers[number]) || number; - return string.replace(/%d/i, value); - } - - var words = seconds < 45 && substitute($l.seconds, Math.round(seconds)) || - seconds < 90 && substitute($l.minute, 1) || - minutes < 45 && substitute($l.minutes, Math.round(minutes)) || - minutes < 90 && substitute($l.hour, 1) || - hours < 24 && substitute($l.hours, Math.round(hours)) || - hours < 42 && substitute($l.day, 1) || - days < 30 && substitute($l.days, Math.round(days)) || - days < 45 && substitute($l.month, 1) || - days < 365 && substitute($l.months, Math.round(days / 30)) || - years < 1.5 && substitute($l.year, 1) || - substitute($l.years, Math.round(years)); - - var separator = $l.wordSeparator || ""; - if ($l.wordSeparator === undefined) { separator = " "; } - return $.trim([prefix, words, suffix].join(separator)); - }, - - parse: function(iso8601) { - var s = $.trim(iso8601); - s = s.replace(/\.\d+/,""); // remove milliseconds - s = s.replace(/-/,"/").replace(/-/,"/"); - s = s.replace(/T/," ").replace(/Z/," UTC"); - s = s.replace(/([\+\-]\d\d)\:?(\d\d)/," $1$2"); // -04:00 -> -0400 - s = s.replace(/([\+\-]\d\d)$/," $100"); // +09 -> +0900 - return new Date(s); - }, - datetime: function(elem) { - var iso8601 = $t.isTime(elem) ? $(elem).attr("datetime") : $(elem).attr("title"); - return $t.parse(iso8601); - }, - isTime: function(elem) { - // jQuery's `is()` doesn't play well with HTML5 in IE - return $(elem).get(0).tagName.toLowerCase() === "time"; // $(elem).is("time"); - } - }); - - // functions that can be called via $(el).timeago('action') - // init is default when no action is given - // functions are called with context of a single element - var functions = { - init: function() { - var refresh_el = $.proxy(refresh, this); - refresh_el(); - var $s = $t.settings; - if ($s.refreshMillis > 0) { - this._timeagoInterval = setInterval(refresh_el, $s.refreshMillis); - } - }, - update: function(timestamp) { - var date = (timestamp instanceof Date) ? timestamp : $t.parse(timestamp); - $(this).data('timeago', { datetime: date }); - if ($t.settings.localeTitle) { - $(this).attr("title", date.toLocaleString()); - } - refresh.apply(this); - }, - updateFromDOM: function() { - $(this).data('timeago', { datetime: $t.parse( $t.isTime(this) ? $(this).attr("datetime") : $(this).attr("title") ) }); - refresh.apply(this); - }, - dispose: function () { - if (this._timeagoInterval) { - window.clearInterval(this._timeagoInterval); - this._timeagoInterval = null; - } - } - }; - - $.fn.timeago = function(action, options) { - var fn = action ? 
functions[action] : functions.init; - if (!fn) { - throw new Error("Unknown function name '"+ action +"' for timeago"); - } - // each over objects here and call the requested function - this.each(function() { - fn.call(this, options); - }); - return this; - }; - - function refresh() { - var $s = $t.settings; - - //check if it's still visible - if ($s.autoDispose && !$.contains(document.documentElement,this)) { - //stop if it has been removed - $(this).timeago("dispose"); - return this; - } - - var data = prepareData(this); - - if (!isNaN(data.datetime)) { - if ( $s.cutoff === 0 || Math.abs(distance(data.datetime)) < $s.cutoff) { - $(this).text(inWords(data.datetime)); - } else { - if ($(this).attr('title').length > 0) { - $(this).text($(this).attr('title')); - } - } - } - return this; - } - - function prepareData(element) { - element = $(element); - if (!element.data("timeago")) { - element.data("timeago", { datetime: $t.datetime(element) }); - var text = $.trim(element.text()); - if ($t.settings.localeTitle) { - element.attr("title", element.data('timeago').datetime.toLocaleString()); - } else if (text.length > 0 && !($t.isTime(element) && element.attr("title"))) { - element.attr("title", text); - } - } - return element.data("timeago"); - } - - function inWords(date) { - return $t.inWords(distance(date)); - } - - function distance(date) { - return (new Date().getTime() - date.getTime()); - } - - // fix for IE6 suckage - document.createElement("abbr"); - document.createElement("time"); -})); diff --git a/static/js/manage/backlog-overview.js b/static/js/manage/backlog-overview.js index 3b5c262798..08b9fffc53 100644 --- a/static/js/manage/backlog-overview.js +++ b/static/js/manage/backlog-overview.js @@ -1,5 +1,4 @@ MEDUSA.manage.backlogOverview = function() { - checkForcedSearch(); function checkForcedSearch() { @@ -43,14 +42,14 @@ MEDUSA.manage.backlogOverview = function() { if (episodeStatus.indexOf('snatched') >= 0) { img.prop('src', 'images/yes16.png'); setTimeout(function() { - img.parent().parent().parent().remove() - }, 3000) + img.parent().parent().parent().remove(); + }, 3000); } else { img.prop('src', 'images/search16.png'); } } } - }); + }); } $('#pickShow').on('change', function() { @@ -60,7 +59,33 @@ MEDUSA.manage.backlogOverview = function() { } }); - $('.forceBacklog').on('click', function(){ + $('#backlog_period').on('change', function() { + api.patch('config/main', { + backlogOverview: { + period: $(this).val() + } + }).then(function(response) { + log.info(response); + window.location.reload(); + }).catch(function(err) { + log.error(err); + }); + }); + + $('#backlog_status').on('change', function() { + api.patch('config/main', { + backlogOverview: { + status: $(this).val() + } + }).then(function(response) { + log.info(response); + window.location.reload(); + }).catch(function(err) { + log.error(err); + }); + }); + + $('.forceBacklog').on('click', function() { $.get($(this).attr('href')); $(this).text('Searching...'); return false; @@ -78,8 +103,8 @@ MEDUSA.manage.backlogOverview = function() { if (data.result.toLowerCase() === 'success') { img.prop('src', 'images/yes16.png'); setTimeout(function() { - img.parent().parent().parent().remove() - }, 3000) + img.parent().parent().parent().remove(); + }, 3000); } else { img.prop('src', 'images/no16.png'); } diff --git a/static/js/manage/manage-searches.js b/static/js/manage/manage-searches.js new file mode 100644 index 0000000000..21a27af096 --- /dev/null +++ b/static/js/manage/manage-searches.js @@ -0,0 +1,98 @@ 
+MEDUSA.manage.manageSearches = function() { + /** + * Get the total number of current scene exceptions per source. Will request medusa, xem and anidb name exceptions. + * @param exceptions - A list of exception types with their last_updates. + */ + var updateExceptionTable = function(exceptions) { + var status = $('#sceneExceptionStatus'); + + var medusaException = exceptions.data.filter(function(obj) { + return obj.id === 'local'; + }); + var cusExceptionDate = new Date(medusaException[0].lastRefresh * 1000).toLocaleDateString(); + + var xemException = exceptions.data.filter(function(obj) { + return obj.id === 'xem'; + }); + var xemExceptionDate = new Date(xemException[0].lastRefresh * 1000).toLocaleDateString(); + + var anidbException = exceptions.data.filter(function(obj) { + return obj.id === 'anidb'; + }); + var anidbExceptionDate = new Date(anidbException[0].lastRefresh * 1000).toLocaleDateString(); + + var table = $('
          ') + .append( + '
        • ' + + '' + + 'Last updated medusa\'s exceptions ' + + cusExceptionDate + ) + .append( + '
        • ' + + '' + + 'Last updated xem exceptions ' + + xemExceptionDate + ) + .append( + '
        • Last updated anidb exceptions ' + + anidbExceptionDate + ); + + status.append(table); + $('.forceSceneExceptionRefresh').removeClass('disabled'); + }; + + /** + * Update an element with a spinner gif and a descriptive message. + * @param spinnerContainer - An element we can use to add the spinner and message to. + * @param message - A string with the message to display behind the spinner. + * @param showSpinner - A boolean to show or not show the spinner (gif). + */ + var updateSpinner = function(spinnerContainer, message, showSpinner) { + if (showSpinner) { + message = ' ' + message; + } + $(spinnerContainer).empty().append(message); + }; + + /** + * Trigger the force refresh of all the exception types. + */ + $('.forceSceneExceptionRefresh').on('click', function() { + var status = $('#sceneExceptionStatus'); + // Start a spinner. + updateSpinner(status, 'Retrieving scene exceptions...', true); + + api.post('alias-source/all/operation', {type: 'REFRESH'}, { + timeout: 60000 + }).then(function(response) { + status[0].innerHTML = ''; + status.append( + $('').text(response.data.result) + ); + + api.get('alias-source').then(function(response) { + updateExceptionTable(response); + $('.forceSceneExceptionRefresh').addClass('disabled'); + }).catch(function(err) { + log.error('Trying to get scene exceptions failed with error: ' + err); + updateSpinner(status, 'Trying to get scene exceptions failed with error: ' + err, false); + }); + updateSpinner(status, 'Finished updating scene exceptions.', false); + }).catch(function(err) { + log.error('Trying to update scene exceptions failed with error: ' + err); + updateSpinner(status, 'Trying to update scene exceptions failed with error: ' + err, false); + }); + }); + + // Initially load the exception types last updates on page load. 
+ api.get('alias-source').then(function(response) { + updateExceptionTable(response); + }).catch(function(err) { + log.error('Trying to get scene exceptions failed with error: ' + err); + }); +}; diff --git a/static/js/mass-edit.js b/static/js/mass-edit.js index 0fc0cc5459..64b50360b2 100644 --- a/static/js/mass-edit.js +++ b/static/js/mass-edit.js @@ -14,7 +14,8 @@ $(document).ready(function() { $('#display_new_root_dir_' + curIndex).html('' + $(this).val() + ''); }); - $('.edit_root_dir').on('click', function() { + $('.edit_root_dir').on('click', function(event) { + event.preventDefault(); var curIndex = findDirIndex($(this).attr('id')); var initialDir = $('#new_root_dir_' + curIndex).val(); $(this).nFileBrowser(editRootDir, { diff --git a/static/js/mass-update.js b/static/js/mass-update.js index f2847e54ce..346c826349 100644 --- a/static/js/mass-update.js +++ b/static/js/mass-update.js @@ -62,6 +62,8 @@ $(document).ready(function() { } }); + var totalCount = [].concat.apply([], [updateArr, refreshArr, renameArr, subtitleArr, deleteArr, removeArr, metadataArr]).length; // eslint-disable-line no-useless-call + if (deleteCount >= 1) { $.confirm({ title: 'Delete Shows', @@ -76,7 +78,7 @@ $(document).ready(function() { deleteArr.push($(this).attr('id').split('-')[1]); } }); - if (updateArr.length + refreshArr.length + renameArr.length + subtitleArr.length + deleteArr.length + removeArr.length + metadataArr.length === 0) { + if (totalCount === 0) { return false; } var params = $.param({ @@ -93,7 +95,7 @@ $(document).ready(function() { } }); } - if (updateArr.length + refreshArr.length + renameArr.length + subtitleArr.length + deleteArr.length + removeArr.length + metadataArr.length === 0) { + if (totalCount === 0) { return false; } var params = $.param({ diff --git a/static/js/plot-tooltip.js b/static/js/plot-tooltip.js index 26a95dab9d..6a0f85db37 100644 --- a/static/js/plot-tooltip.js +++ b/static/js/plot-tooltip.js @@ -1,16 +1,16 @@ $(function() { $('.plotInfo').each(function() { var match = $(this).attr('id').match(/^plot_info_([\da-z]+)_(\d+)_(\d+)$/); - // http://localhost:8081/api/v2/show/tvdb83462/s01e01/description?api_key=xxx + // http://localhost:8081/api/v2/series/tvdb83462/episode/s01e01/description?api_key=xxx $(this).qtip({ content: { text: function(event, qt) { - api.get('show/' + match[1] + '/s' + match[2] + 'e' + match[3] + '/description').then(function(response) { + api.get('series/' + match[1] + '/episode/s' + match[2] + 'e' + match[3] + '/description').then(function(response) { // Set the tooltip content upon successful retrieval qt.set('content.text', response.data); - }, function(xhr, status, error) { + }, function(xhr) { // Upon failure... 
set the tooltip content to the status and error value - qt.set('content.text', status + ': ' + error); + qt.set('content.text', 'Error while loading plot: ' + xhr.status + ': ' + xhr.statusText); }); return 'Loading...'; } diff --git a/static/js/quality-chooser.js b/static/js/quality-chooser.js index 1f723f85e7..0396961a7e 100644 --- a/static/js/quality-chooser.js +++ b/static/js/quality-chooser.js @@ -31,50 +31,53 @@ $(document).ready(function() { function backloggedEpisodes() { var selectedPreffered = []; var selectedAllowed = []; - $('#preferred_qualities :selected').each(function(i, selected){ + $('#preferred_qualities :selected').each(function(i, selected) { selectedPreffered[i] = $(selected).val(); }); - $('#allowed_qualities :selected').each(function(i, selected){ + $('#allowed_qualities :selected').each(function(i, selected) { selectedAllowed[i] = $(selected).val(); }); - var url = 'show/' + $('#showIndexerSlug').attr('value') + - '/backlogged' + + var url = 'series/' + $('#series_slug').attr('value') + + '/legacy/backlogged' + '?allowed=' + selectedAllowed + - '&preferred=' + selectedPreffered + '&preferred=' + selectedPreffered; api.get(url).then(function(response) { - var newBacklogged = response.data.new - var existingBacklogged = response.data.existing - var variation = Math.abs(newBacklogged - existingBacklogged) - var html = 'Currently you have ' + existingBacklogged + ' backlogged episodes.
          ' - if (newBacklogged == -1 || existingBacklogged == -1) { - html = 'No qualities selected' + var newBacklogged = response.data.new; + var existingBacklogged = response.data.existing; + var variation = Math.abs(newBacklogged - existingBacklogged); + var html = 'Current backlog: ' + existingBacklogged + ' episodes
          '; + if (newBacklogged === -1 || existingBacklogged === -1) { + html = 'No qualities selected'; } else if (newBacklogged === existingBacklogged) { - html += 'This change won\'t affect your backlogged episodes' - } else if (newBacklogged > existingBacklogged) { - html += '
          WARNING: your backlogged episodes will increase by ' + variation + '' - html+= '.
          Total new backlogged: ' + newBacklogged + '' - // Only show the archive action div if we have backlog increase - $('#archive').show(); + html += 'This change won\'t affect your backlogged episodes'; } else { - html += 'Your backlogged episodes will decrease by ' + variation + '' - html+= '.
          Total new backlogged: ' + newBacklogged + '' + html += '
          New backlog: ' + newBacklogged + ' episodes'; + html += '

          '; + var change = ''; + if (newBacklogged > existingBacklogged) { + html += 'WARNING: '; + change = 'increase'; + // Only show the archive action div if we have backlog increase + $('#archive').show(); + } else { + change = 'decrease'; + } + html += 'Backlog will ' + change + ' by ' + variation + ' episodes.'; } - $('#backlogged_episodes').html(html); + $('#backloggedEpisodes').html(html); }); } function archiveEpisodes() { - var url = 'show/' + $('#showIndexerName').attr('value') + $('#showID').attr('value') + - '/archiveEpisodes' - api.get(url).then(function(response) { - var archivedStatus = response.data.archived - var html = '' - if (archivedStatus) { - html = 'Successfuly archived episodes' + var url = 'series/' + $('#series_slug').attr('value') + '/operation'; + api.post(url, {type: 'ARCHIVE_EPISODES'}).then(function(response) { + var html = ''; + if (response.status === 201) { + html = 'Successfully archived episodes'; // Recalculate backlogged episodes after we archive it backloggedEpisodes(); - } else { - html = 'Not episodes needed to be archived' + } else if (response.status === 204) { + html = 'No episodes to be archived'; } $('#archivedStatus').html(html); // Restore button text @@ -90,34 +93,34 @@ $(document).ready(function() { var allowed = $.map($('#allowed_qualities option:selected'), function(option) { return option.text; }); - var both = allowed.concat(preferred.filter(function (item) { + var both = allowed.concat(preferred.filter(function(item) { return allowed.indexOf(item) < 0; })); - var allowed_preferred_explanation = both.join(', '); - var preferred_explanation = preferred.join(', '); - var allowed_explanation = allowed.join(', '); + var allowedPreferredExplanation = both.join(', '); + var preferredExplanation = preferred.join(', '); + var allowedExplanation = allowed.join(', '); - $('#allowed_preferred_explanation').text(allowed_preferred_explanation); - $('#preferred_explanation').text(preferred_explanation); - $('#allowed_explanation').text(allowed_explanation); + $('#allowedPreferredExplanation').text(allowedPreferredExplanation); + $('#preferredExplanation').text(preferredExplanation); + $('#allowedExplanation').text(allowedExplanation); - $('#allowed_text').hide(); - $('#preferred_text1').hide(); - $('#preferred_text2').hide(); - $('#quality_explanation').show(); + $('#allowedText').hide(); + $('#preferredText1').hide(); + $('#preferredText2').hide(); + $('#qualityExplanation').show(); - if (preferred.length) { - $('#preferred_text1').show(); - $('#preferred_text2').show(); - } else if (allowed.length) { - $('#allowed_text').show(); + if (preferred.length >= 1) { + $('#preferredText1').show(); + $('#preferredText2').show(); + } else if (allowed.length >= 1) { + $('#allowedText').show(); } else { - $('#quality_explanation').hide(); + $('#qualityExplanation').hide(); } } - $('#archiveEpisodes').on('click', function(){ + $('#archiveEpisodes').on('click', function() { $.get($(this).attr('href')); $(this).val('Archiving...'); archiveEpisodes(); @@ -128,7 +131,7 @@ $(document).ready(function() { setFromPresets($('#qualityPreset :selected').val()); }); - $('#qualityPreset, #preferred_qualities, #allowed_qualities').on('change', function(){ + $('#qualityPreset, #preferred_qualities, #allowed_qualities').on('change', function() { setQualityText(); backloggedEpisodes(); }); diff --git a/static/js/root-dirs.js b/static/js/root-dirs.js index 5e6ed14b27..2c9da7efc1 100644 --- a/static/js/root-dirs.js +++ b/static/js/root-dirs.js @@ -1,3 +1,4 @@ +// @TODO: 
Remove this when we fully drop support for IE > 8 // Avoid `console` errors in browsers that lack a console. (function() { // eslint-disable-line wrap-iife var method; @@ -23,8 +24,6 @@ $(document).ready(function() { function setDefault(which, force) { - console.log('setting default to ' + which); - if (which !== undefined && which.length === 0) { return; } @@ -57,6 +56,9 @@ $(document).ready(function() { function refreshRootDirs() { if ($('#rootDirs').length === 0) { + /* Trigger change event as $.rootDirCheck() function is not + always available when this section of code is called. */ + $('#rootDirs').trigger('change'); return; } @@ -80,24 +82,21 @@ $(document).ready(function() { $('#defaultRootDir').prop('disabled', doDisable); $('#editRootDir').prop('disabled', doDisable); - var logString = ''; var dirString = ''; if ($('#whichDefaultRootDir').val().length >= 4) { dirString = $('#whichDefaultRootDir').val().substr(3); } $('#rootDirs option').each(function() { - logString += $(this).val() + '=' + $(this).text() + '->' + $(this).attr('id') + '\n'; if (dirString.length !== 0) { dirString += '|' + $(this).val(); } }); - logString += 'def: ' + $('#whichDefaultRootDir').val(); - console.log(logString); $('#rootDirText').val(dirString); - $('#rootDirText').change(); - console.log('rootDirText: ' + $('#rootDirText').val()); + // Manually trigger change event as setting .val directly doesn't + $('#rootDirs').trigger('change'); } + function addRootDir(path) { if (path.length === 0) { return; diff --git a/static/js/schedule/index.js b/static/js/schedule/index.js index 73b63586be..6c682dfd12 100644 --- a/static/js/schedule/index.js +++ b/static/js/schedule/index.js @@ -1,5 +1,5 @@ MEDUSA.schedule.index = function() { - if ($.isMeta({'layout': 'schedule'}, ['list'])) { + if ($.isMeta({layout: 'schedule'}, ['list'])) { var sortCodes = { date: 0, show: 2, @@ -37,7 +37,7 @@ MEDUSA.schedule.index = function() { $.ajaxEpSearch(); } - if ($.isMeta({'layout': 'schedule'}, ['banner', 'poster'])) { + if ($.isMeta({layout: 'schedule'}, ['banner', 'poster'])) { $.ajaxEpSearch({ size: 16, loadingImage: 'loading16' + MEDUSA.config.themeSpinner + '.gif' @@ -61,16 +61,16 @@ MEDUSA.schedule.index = function() { $.tablesorter.columnSelector.attachTo($('#showListTable'), '#popover-target'); }); - $('.show-option select[name="layout"]').on('change', function(){ - api.patch('config', { + $('.show-option select[name="layout"]').on('change', function() { + api.patch('config/main', { layout: { schedule: $(this).val() } }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); }); }); }; diff --git a/static/js/vender.js b/static/js/vender.js index 3b92016621..df1ff63542 100644 --- a/static/js/vender.js +++ b/static/js/vender.js @@ -10043,145 +10043,6 @@ return jQuery; */ if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 
0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b.target).is(this))return b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a("#"===f?[]:f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is('input[type="radio"], input[type="checkbox"]')||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof 
b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(a>this.$items.length-1||a<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var 
g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.7",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return 
a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.7",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&jdocument.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth
          ',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);if(c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),!c.isInStateTrue())return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide()},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var 
k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-mo.width?"left":"left"==h&&k.left-lg.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;jg.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null,a.$element=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.7",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:''}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > 
a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.7",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b=e[a]&&(void 0===e[a+1]||b .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.7",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return e=a-d&&"bottom"},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var 
b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); -/** -* bootstrap-formhelpers.js v2.3.0 by @vincentlamanna -* Copyright 2013 Vincent Lamanna -* http://www.apache.org/licenses/LICENSE-2.0 -*/ -if(!jQuery)throw new Error("Bootstrap Form Helpers requires jQuery");var BFHCountriesList={AF:"Afghanistan",AL:"Albania",DZ:"Algeria",AS:"American Samoa",AD:"Andorra",AO:"Angola",AI:"Anguilla",AQ:"Antarctica",AG:"Antigua and Barbuda",AR:"Argentina",AM:"Armenia",AW:"Aruba",AU:"Australia",AT:"Austria",AZ:"Azerbaijan",BH:"Bahrain",BD:"Bangladesh",BB:"Barbados",BY:"Belarus",BE:"Belgium",BZ:"Belize",BJ:"Benin",BM:"Bermuda",BT:"Bhutan",BO:"Bolivia",BA:"Bosnia and Herzegovina",BW:"Botswana",BV:"Bouvet Island",BR:"Brazil",IO:"British Indian Ocean Territory",VG:"British Virgin Islands",BN:"Brunei",BG:"Bulgaria",BF:"Burkina Faso",BI:"Burundi",CI:"Côte d'Ivoire",KH:"Cambodia",CM:"Cameroon",CA:"Canada",CV:"Cape Verde",KY:"Cayman Islands",CF:"Central African Republic",TD:"Chad",CL:"Chile",CN:"China",CX:"Christmas Island",CC:"Cocos (Keeling) Islands",CO:"Colombia",KM:"Comoros",CG:"Congo",CK:"Cook Islands",CR:"Costa Rica",HR:"Croatia",CU:"Cuba",CY:"Cyprus",CZ:"Czech Republic",CD:"Democratic Republic of the Congo",DK:"Denmark",DJ:"Djibouti",DM:"Dominica",DO:"Dominican Republic",TP:"East Timor",EC:"Ecuador",EG:"Egypt",SV:"El Salvador",GQ:"Equatorial Guinea",ER:"Eritrea",EE:"Estonia",ET:"Ethiopia",FO:"Faeroe Islands",FK:"Falkland Islands",FJ:"Fiji",FI:"Finland",MK:"Former Yugoslav Republic of Macedonia",FR:"France",FX:"France, Metropolitan",GF:"French Guiana",PF:"French Polynesia",TF:"French Southern Territories",GA:"Gabon",GE:"Georgia",DE:"Germany",GH:"Ghana",GI:"Gibraltar",GR:"Greece",GL:"Greenland",GD:"Grenada",GP:"Guadeloupe",GU:"Guam",GT:"Guatemala",GN:"Guinea",GW:"Guinea-Bissau",GY:"Guyana",HT:"Haiti",HM:"Heard and Mc Donald Islands",HN:"Honduras",HK:"Hong Kong",HU:"Hungary",IS:"Iceland",IN:"India",ID:"Indonesia",IR:"Iran",IQ:"Iraq",IE:"Ireland",IL:"Israel",IT:"Italy",JM:"Jamaica",JP:"Japan",JO:"Jordan",KZ:"Kazakhstan",KE:"Kenya",KI:"Kiribati",KW:"Kuwait",KG:"Kyrgyzstan",LA:"Laos",LV:"Latvia",LB:"Lebanon",LS:"Lesotho",LR:"Liberia",LY:"Libya",LI:"Liechtenstein",LT:"Lithuania",LU:"Luxembourg",MO:"Macau",MG:"Madagascar",MW:"Malawi",MY:"Malaysia",MV:"Maldives",ML:"Mali",MT:"Malta",MH:"Marshall 
Islands",MQ:"Martinique",MR:"Mauritania",MU:"Mauritius",YT:"Mayotte",MX:"Mexico",FM:"Micronesia",MD:"Moldova",MC:"Monaco",MN:"Mongolia",ME:"Montenegro",MS:"Montserrat",MA:"Morocco",MZ:"Mozambique",MM:"Myanmar",NA:"Namibia",NR:"Nauru",NP:"Nepal",NL:"Netherlands",AN:"Netherlands Antilles",NC:"New Caledonia",NZ:"New Zealand",NI:"Nicaragua",NE:"Niger",NG:"Nigeria",NU:"Niue",NF:"Norfolk Island",KP:"North Korea",MP:"Northern Marianas",NO:"Norway",OM:"Oman",PK:"Pakistan",PW:"Palau",PS:"Palestine",PA:"Panama",PG:"Papua New Guinea",PY:"Paraguay",PE:"Peru",PH:"Philippines",PN:"Pitcairn Islands",PL:"Poland",PT:"Portugal",PR:"Puerto Rico",QA:"Qatar",RE:"Reunion",RO:"Romania",RU:"Russia",RW:"Rwanda",ST:"São Tomé and Príncipe",SH:"Saint Helena",PM:"St. Pierre and Miquelon",KN:"Saint Kitts and Nevis",LC:"Saint Lucia",VC:"Saint Vincent and the Grenadines",WS:"Samoa",SM:"San Marino",SA:"Saudi Arabia",SN:"Senegal",RS:"Serbia",SC:"Seychelles",SL:"Sierra Leone",SG:"Singapore",SK:"Slovakia",SI:"Slovenia",SB:"Solomon Islands",SO:"Somalia",ZA:"South Africa",GS:"South Georgia and the South Sandwich Islands",KR:"South Korea",ES:"Spain",LK:"Sri Lanka",SD:"Sudan",SR:"Suriname",SJ:"Svalbard and Jan Mayen Islands",SZ:"Swaziland",SE:"Sweden",CH:"Switzerland",SY:"Syria",TW:"Taiwan",TJ:"Tajikistan",TZ:"Tanzania",TH:"Thailand",BS:"The Bahamas",GM:"The Gambia",TG:"Togo",TK:"Tokelau",TO:"Tonga",TT:"Trinidad and Tobago",TN:"Tunisia",TR:"Turkey",TM:"Turkmenistan",TC:"Turks and Caicos Islands",TV:"Tuvalu",VI:"US Virgin Islands",UG:"Uganda",UA:"Ukraine",AE:"United Arab Emirates",GB:"United Kingdom",US:"United States",UM:"United States Minor Outlying Islands",UY:"Uruguay",UZ:"Uzbekistan",VU:"Vanuatu",VA:"Vatican City",VE:"Venezuela",VN:"Vietnam",WF:"Wallis and Futuna Islands",EH:"Western Sahara",YE:"Yemen",ZM:"Zambia",ZW:"Zimbabwe"},BFHCurrenciesList={AED:{label:"United Arab Emirates dirham",currencyflag:"",symbol:"د.إ"},AFN:{label:"Afghan afghani",currencyflag:"",symbol:"؋"},ALL:{label:"Albanian lek",currencyflag:"",symbol:"L"},AMD:{label:"Armenian dram",currencyflag:"",symbol:"դր"},AOA:{label:"Angolan kwanza",currencyflag:"",symbol:"Kz"},ARS:{label:"Argentine peso",currencyflag:"",symbol:"$"},AUD:{label:"Australian dollar",currencyflag:"AUD",symbol:"$"},AWG:{label:"Aruban florin",currencyflag:"",symbol:"ƒ"},AZN:{label:"Azerbaijani manat",currencyflag:"",symbol:""},BAM:{label:"Bosnia and Herzegovina convertible mark",currencyflag:"",symbol:"KM"},BBD:{label:"Barbadian dollar",currencyflag:"",symbol:"$"},BDT:{label:"Bangladeshi taka",currencyflag:"",symbol:"৳"},BGN:{label:"Bulgarian lev",currencyflag:"",symbol:"лв"},BHD:{label:"Bahraini dinar",currencyflag:"",symbol:".د.ب"},BIF:{label:"Burundian franc",currencyflag:"",symbol:"Fr"},BMD:{label:"Bermudian dollar",currencyflag:"",symbol:"$"},BND:{label:"Brunei dollar",currencyflag:"",symbol:"$"},BOB:{label:"Bolivian boliviano",currencyflag:"",symbol:"Bs"},BRL:{label:"Brazilian real",currencyflag:"",symbol:"R$"},BSD:{label:"Bahamian dollar",currencyflag:"",symbol:"$"},BTN:{label:"Bhutanese ngultrum",currencyflag:"",symbol:"Nu"},BWP:{label:"Botswana pula",currencyflag:"",symbol:"P"},BYR:{label:"Belarusian ruble",currencyflag:"",symbol:"Br"},BZD:{label:"Belize dollar",currencyflag:"",symbol:"$"},CAD:{label:"Canadian dollar",currencyflag:"",symbol:"$"},CDF:{label:"Congolese franc",currencyflag:"",symbol:"Fr"},CHF:{label:"Swiss franc",currencyflag:"CHF",symbol:"Fr"},CLP:{label:"Chilean peso",currencyflag:"",symbol:"$"},CNY:{label:"Chinese 
yuan",currencyflag:"",symbol:"¥"},COP:{label:"Colombian peso",currencyflag:"",symbol:"$"},CRC:{label:"Costa Rican colón",currencyflag:"",symbol:"₡"},CUP:{label:"Cuban convertible peso",currencyflag:"",symbol:"$"},CVE:{label:"Cape Verdean escudo",currencyflag:"",symbol:"$"},CZK:{label:"Czech koruna",currencyflag:"",symbol:"Kč"},DJF:{label:"Djiboutian franc",currencyflag:"",symbol:"Fr"},DKK:{label:"Danish krone",currencyflag:"DKK",symbol:"kr"},DOP:{label:"Dominican peso",currencyflag:"",symbol:"$"},DZD:{label:"Algerian dinar",currencyflag:"",symbol:"د.ج"},EGP:{label:"Egyptian pound",currencyflag:"",symbol:"ج.م"},ERN:{label:"Eritrean nakfa",currencyflag:"",symbol:"Nfk"},ETB:{label:"Ethiopian birr",currencyflag:"",symbol:"Br"},EUR:{label:"Euro",currencyflag:"EUR",symbol:"€"},FJD:{label:"Fijian dollar",currencyflag:"",symbol:"$"},FKP:{label:"Falkland Islands pound",currencyflag:"",symbol:"£"},GBP:{label:"British pound",currencyflag:"",symbol:"£"},GEL:{label:"Georgian lari",currencyflag:"",symbol:"ლ"},GHS:{label:"Ghana cedi",currencyflag:"",symbol:"₵"},GMD:{label:"Gambian dalasi",currencyflag:"",symbol:"D"},GNF:{label:"Guinean franc",currencyflag:"",symbol:"Fr"},GTQ:{label:"Guatemalan quetzal",currencyflag:"",symbol:"Q"},GYD:{label:"Guyanese dollar",currencyflag:"",symbol:"$"},HKD:{label:"Hong Kong dollar",currencyflag:"",symbol:"$"},HNL:{label:"Honduran lempira",currencyflag:"",symbol:"L"},HRK:{label:"Croatian kuna",currencyflag:"",symbol:"kn"},HTG:{label:"Haitian gourde",currencyflag:"",symbol:"G"},HUF:{label:"Hungarian forint",currencyflag:"",symbol:"Ft"},IDR:{label:"Indonesian rupiah",currencyflag:"",symbol:"Rp"},ILS:{label:"Israeli new shekel",currencyflag:"",symbol:"₪"},IMP:{label:"Manx pound",currencyflag:"",symbol:"£"},INR:{label:"Indian rupee",currencyflag:"",symbol:""},IQD:{label:"Iraqi dinar",currencyflag:"",symbol:"ع.د"},IRR:{label:"Iranian rial",currencyflag:"",symbol:"﷼"},ISK:{label:"Icelandic króna",currencyflag:"",symbol:"kr"},JEP:{label:"Jersey pound",currencyflag:"",symbol:"£"},JMD:{label:"Jamaican dollar",currencyflag:"",symbol:"$"},JOD:{label:"Jordanian dinar",currencyflag:"",symbol:"د.ا"},JPY:{label:"Japanese yen",currencyflag:"",symbol:"¥"},KES:{label:"Kenyan shilling",currencyflag:"",symbol:"Sh"},KGS:{label:"Kyrgyzstani som",currencyflag:"",symbol:"лв"},KHR:{label:"Cambodian riel",currencyflag:"",symbol:"៛"},KMF:{label:"Comorian franc",currencyflag:"",symbol:"Fr"},KPW:{label:"North Korean won",currencyflag:"",symbol:"₩"},KRW:{label:"South Korean won",currencyflag:"",symbol:"₩"},KWD:{label:"Kuwaiti dinar",currencyflag:"",symbol:"د.ك"},KYD:{label:"Cayman Islands dollar",currencyflag:"",symbol:"$"},KZT:{label:"Kazakhstani tenge",currencyflag:"",symbol:"₸"},LAK:{label:"Lao kip",currencyflag:"",symbol:"₭"},LBP:{label:"Lebanese pound",currencyflag:"",symbol:"ل.ل"},LKR:{label:"Sri Lankan rupee",currencyflag:"",symbol:"Rs"},LRD:{label:"Liberian dollar",currencyflag:"",symbol:"$"},LSL:{label:"Lesotho loti",currencyflag:"",symbol:"L"},LTL:{label:"Lithuanian litas",currencyflag:"",symbol:"Lt"},LVL:{label:"Latvian lats",currencyflag:"",symbol:"Ls"},LYD:{label:"Libyan dinar",currencyflag:"",symbol:"ل.د"},MAD:{label:"Moroccan dirham",currencyflag:"",symbol:"د.م."},MDL:{label:"Moldovan leu",currencyflag:"",symbol:"L"},MGA:{label:"Malagasy ariary",currencyflag:"",symbol:"Ar"},MKD:{label:"Macedonian denar",currencyflag:"",symbol:"ден"},MMK:{label:"Burmese kyat",currencyflag:"",symbol:"Ks"},MNT:{label:"Mongolian tögrög",currencyflag:"",symbol:"₮"},MOP:{label:"Macanese 
pataca",currencyflag:"",symbol:"P"},MRO:{label:"Mauritanian ouguiya",currencyflag:"",symbol:"UM"},MUR:{label:"Mauritian rupee",currencyflag:"",symbol:"Rs"},MVR:{label:"Maldivian rufiyaa",currencyflag:"",symbol:".ރ"},MWK:{label:"Malawian kwacha",currencyflag:"",symbol:"MK"},MXN:{label:"Mexican peso",currencyflag:"",symbol:"$"},MYR:{label:"Malaysian ringgit",currencyflag:"",symbol:"MR"},MZN:{label:"Mozambican metical",currencyflag:"",symbol:"MT"},NAD:{label:"Namibian dollar",currencyflag:"",symbol:"$"},NGN:{label:"Nigerian naira",currencyflag:"",symbol:"₦"},NIO:{label:"Nicaraguan córdoba",currencyflag:"",symbol:"C$"},NOK:{label:"Norwegian krone",currencyflag:"",symbol:"kr"},NPR:{label:"Nepalese rupee",currencyflag:"",symbol:"Rs"},NZD:{label:"New Zealand dollar",currencyflag:"",symbol:"$"},OMR:{label:"Omani rial",currencyflag:"",symbol:"ر.ع."},PAB:{label:"Panamanian balboa",currencyflag:"",symbol:"B/."},PEN:{label:"Peruvian nuevo sol",currencyflag:"",symbol:"S/."},PGK:{label:"Papua New Guinean kina",currencyflag:"",symbol:"K"},PHP:{label:"Philippine peso",currencyflag:"",symbol:"₱"},PKR:{label:"Pakistani rupee",currencyflag:"",symbol:"Rs"},PLN:{label:"Polish złoty",currencyflag:"",symbol:"zł"},PRB:{label:"Transnistrian ruble",currencyflag:"",symbol:"р."},PYG:{label:"Paraguayan guaraní",currencyflag:"",symbol:"₲"},QAR:{label:"Qatari riyal",currencyflag:"",symbol:"ر.ق"},RON:{label:"Romanian leu",currencyflag:"",symbol:"L"},RSD:{label:"Serbian dinar",currencyflag:"",symbol:"дин"},RUB:{label:"Russian ruble",currencyflag:"",symbol:"руб."},RWF:{label:"Rwandan franc",currencyflag:"",symbol:"Fr"},SAR:{label:"Saudi riyal",currencyflag:"",symbol:"ر.س"},SBD:{label:"Solomon Islands dollar",currencyflag:"",symbol:"$"},SCR:{label:"Seychellois rupee",currencyflag:"",symbol:"Rs"},SDG:{label:"Singapore dollar",currencyflag:"",symbol:"$"},SEK:{label:"Swedish krona",currencyflag:"",symbol:"kr"},SGD:{label:"Singapore dollar",currencyflag:"",symbol:"$"},SHP:{label:"Saint Helena pound",currencyflag:"",symbol:"£"},SLL:{label:"Sierra Leonean leone",currencyflag:"",symbol:"Le"},SOS:{label:"Somali shilling",currencyflag:"",symbol:"Sh"},SRD:{label:"Surinamese dollar",currencyflag:"",symbol:"$"},SSP:{label:"South Sudanese pound",currencyflag:"",symbol:"£"},STD:{label:"São Tomé and Príncipe dobra",currencyflag:"",symbol:"Db"},SVC:{label:"Salvadoran colón",currencyflag:"",symbol:"₡"},SYP:{label:"Syrian pound",currencyflag:"",symbol:"£"},SZL:{label:"Swazi lilangeni",currencyflag:"",symbol:"L"},THB:{label:"Thai baht",currencyflag:"",symbol:"฿"},TJS:{label:"Tajikistani somoni",currencyflag:"",symbol:"SM"},TMT:{label:"Turkmenistan manat",currencyflag:"",symbol:"m"},TND:{label:"Tunisian dinar",currencyflag:"",symbol:"د.ت"},TOP:{label:"Tongan paʻanga",currencyflag:"",symbol:"T$"},TRY:{label:"Turkish lira",currencyflag:"",symbol:"₺"},TTD:{label:"Trinidad and Tobago dollar",currencyflag:"",symbol:"$"},TWD:{label:"New Taiwan dollar",currencyflag:"",symbol:"$"},TZS:{label:"Tanzanian shilling",currencyflag:"",symbol:"Sh"},UAH:{label:"Ukrainian hryvnia",currencyflag:"",symbol:"₴"},UGX:{label:"Ugandan shilling",currencyflag:"",symbol:"Sh"},USD:{label:"United States dollar",currencyflag:"",symbol:"$"},UYU:{label:"Uruguayan peso",currencyflag:"",symbol:"$"},UZS:{label:"Uzbekistani som",currencyflag:"",symbol:"лв"},VEF:{label:"Venezuelan bolívar",currencyflag:"",symbol:"Bs F"},VND:{label:"Vietnamese đồng",currencyflag:"",symbol:"₫"},VUV:{label:"Vanuatu vatu",currencyflag:"",symbol:"Vt"},WST:{label:"Samoan 
tālā",currencyflag:"",symbol:"T"},XAF:{label:"Central African CFA franc",currencyflag:"XAF",symbol:"Fr"},XCD:{label:"East Caribbean dollar",currencyflag:"XCD",symbol:"$"},XOF:{label:"West African CFA franc",currencyflag:"XOF",symbol:"Fr"},XPF:{label:"CFP franc",currencyflag:"XPF",symbol:"Fr"},YER:{label:"Yemeni rial",currencyflag:"",symbol:"﷼"},ZAR:{label:"South African rand",currencyflag:"ZAR",symbol:"R"},ZMW:{label:"Zambian kwacha",currencyflag:"",symbol:"ZK"},ZWL:{label:"Zimbabwean dollar",currencyflag:"",symbol:"$"}},BFHMonthsList=["January","February","March","April","May","June","July","August","September","October","November","December"],BFHDaysList=["SUN","MON","TUE","WED","THU","FRI","SAT"],BFHDayOfWeekStart=0,BFHFontsList={"Andale Mono":'"Andale Mono", AndaleMono, monospace',Arial:'Arial, "Helvetica Neue", Helvetica, sans-serif',"Arial Black":'"Arial Black", "Arial Bold", Gadget, sans-serif',"Arial Narrow":'"Arial Narrow", Arial, sans-serif',"Arial Rounded MT Bold":'"Arial Rounded MT Bold", "Helvetica Rounded", Arial, sans-serif',"Avant Garde":'"Avant Garde", Avantgarde, "Century Gothic", CenturyGothic, "AppleGothic", sans-serif',Baskerville:'Baskerville, "Baskerville Old Face", "Hoefler Text", Garamond, "Times New Roman", serif',"Big Caslon":'"Big Caslon", "Book Antiqua", "Palatino Linotype", Georgia, serif',"Bodoni MT":'"Bodoni MT", Didot, "Didot LT STD", "Hoefler Text", Garamond, "Times New Roman", serif',"Book Antiqua":'"Book Antiqua", Palatino, "Palatino Linotype", "Palatino LT STD", Georgia, serif',"Brush Script MT":'"Brush Script MT", cursive',Calibri:'Calibri, Candara, Segoe, "Segoe UI", Optima, Arial, sans-serif',"Calisto MT":'"Calisto MT", "Bookman Old Style", Bookman, "Goudy Old Style", Garamond, "Hoefler Text", "Bitstream Charter", Georgia, serif',Cambrio:"Cambria, Georgia, serif",Candara:'Candara, Calibri, Segoe, "Segoe UI", Optima, Arial, sans-serif',"Century Gothic":'"Century Gothic", CenturyGothic, AppleGothic, sans-serif',Consolas:"Consolas, monaco, monospace",Copperplate:'Copperplate, "Copperplate Gothic Light", fantasy',"Courier New":'"Courier New", Courier, "Lucida Sans Typewriter", "Lucida Typewriter", monospace',Didot:'Didot, "Didot LT STD", "Hoefler Text", Garamond, "Times New Roman", serif',"Franklin Gothic Medium":'"Franklin Gothic Medium", "Franklin Gothic", "ITC Franklin Gothic", Arial, sans-serif',Futura:'Futura, "Trebuchet MS", Arial, sans-serif',Garamond:'Garamond, Baskerville, "Baskerville Old Face", "Hoefler Text", "Times New Roman", serif',Geneva:"Geneva, Tahoma, Verdana, sans-serif",Georgia:'Georgia, Times, "Times New Roman", serif',"Gill Sans":'"Gill Sans", "Gill Sans MT", Calibri, sans-serif',"Goudy Old Style":'"Goudy Old Style", Garamond, "Big Caslon", "Times New Roman", serif',Helvetica:'"Helvetica Neue", Helvetica, Arial, sans-serif',"Hoefler Text":'"Hoefler Text", "Baskerville old face", Garamond, "Times New Roman", serif',Impact:'Impact, Haettenschweiler, "Franklin Gothic Bold", Charcoal, "Helvetica Inserat", "Bitstream Vera Sans Bold", "Arial Black", sans serif',"Lucida Bright":'"Lucida Bright", Georgia, serif',"Lucida Console":'"Lucida Console", "Lucida Sans Typewriter", Monaco, "Bitstream Vera Sans Mono", monospace',"Lucida Sans Typewriter":'"Lucida Sans Typewriter", "Lucida Console", Monaco, "Bitstream Vera Sans Mono", monospace',"Lucida Grande":'"Lucida Grande", "Lucida Sans Unicode", "Lucida Sans", Geneva, Verdana, sans-serif',Monaco:'Monaco, Consolas, "Lucida Console", monospace',Optima:'Optima, Segoe, "Segoe UI", Candara, Calibri, 
Arial, sans-serif',Palatino:'Palatino, "Palatino Linotype", "Palatino LT STD", "Book Antiqua", Georgia, serif',Papyrus:"Papyrus, fantasy",Perpetua:'Perpetua, Baskerville, "Big Caslon", "Palatino Linotype", Palatino, "URW Palladio L", "Nimbus Roman No9 L", serif',Rockwell:'Rockwell, "Courier Bold", Courier, Georgia, Times, "Times New Roman", serif',"Rockwell Extra Bold":'"Rockwell Extra Bold", "Rockwell Bold", monospace',"Segoe UI":'"Segoe UI", Frutiger, "Frutiger Linotype',Tahoma:"Tahoma, Verdana, Segoe, sans-serif","Times New Roman":'TimesNewRoman, "Times New Roman", Times, Baskerville, Georgia, serif',"Trebuchet MS":'"Trebuchet MS", "Lucida Grande", "Lucida Sans Unicode", "Lucida Sans", Tahoma, sans-serif',Verdana:"Verdana, Geneva, sans-serif"},BFHFontSizesList={8:"8px",9:"9px",10:"10px",11:"11px",12:"12px",14:"14px",16:"16px",18:"18px",20:"20px",24:"24px",28:"28px",36:"36px",48:"48px"},BFHGoogleFontsList={kind:"webfonts#webfontList",items:[{kind:"webfonts#webfont",family:"ABeeZee",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Abel",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Abril Fatface",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Aclonica",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Acme",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Actor",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Adamina",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Advent Pro",variants:["100","200","300","regular","500","600","700"],subsets:["latin-ext","latin","greek"]},{kind:"webfonts#webfont",family:"Aguafina Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Akronim",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Aladin",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Aldrich",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Alegreya",variants:["regular","italic","700","700italic","900","900italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Alegreya SC",variants:["regular","italic","700","700italic","900","900italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Alex Brush",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Alfa Slab One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Alice",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Alike",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Alike Angular",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Allan",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Allerta",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Allerta Stencil",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Allura",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Almendra",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Almendra Display",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Almendra 
SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Amarante",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Amaranth",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Amatic SC",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Amethysta",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Anaheim",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Andada",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Andika",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Angkor",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Annie Use Your Telescope",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Anonymous Pro",variants:["regular","italic","700","700italic"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Antic",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Antic Didone",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Antic Slab",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Anton",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Arapey",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Arbutus",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Arbutus Slab",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Architects Daughter",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Archivo Black",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Archivo Narrow",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Arimo",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Arizonia",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Armata",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Artifika",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Arvo",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Asap",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Asset",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Astloch",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Asul",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Atomic Age",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Aubrey",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Audiowide",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Autour One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Average",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Average Sans",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Averia Gruesa 
Libre",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Averia Libre",variants:["300","300italic","regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Averia Sans Libre",variants:["300","300italic","regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Averia Serif Libre",variants:["300","300italic","regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bad Script",variants:["regular"],subsets:["cyrillic","latin"]},{kind:"webfonts#webfont",family:"Balthazar",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bangers",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Basic",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Battambang",variants:["regular","700"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Baumans",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bayon",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Belgrano",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Belleza",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"BenchNine",variants:["300","regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bentham",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Berkshire Swash",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bevan",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bigelow Rules",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bigshot One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bilbo",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bilbo Swash Caps",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bitter",variants:["regular","italic","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Black Ops One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bokor",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Bonbon",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Boogaloo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bowlby One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bowlby One SC",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Brawler",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Bree Serif",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bubblegum Sans",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Bubbler One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Buda",variants:["300"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Buenard",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Butcherman",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Butterfly 
Kids",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Cabin",variants:["regular","italic","500","500italic","600","600italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cabin Condensed",variants:["regular","500","600","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cabin Sketch",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Caesar Dressing",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cagliostro",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Calligraffitti",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cambo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Candal",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cantarell",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cantata One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Cantora One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Capriola",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Cardo",variants:["regular","italic","700"],subsets:["greek-ext","latin-ext","latin","greek"]},{kind:"webfonts#webfont",family:"Carme",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Carrois Gothic",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Carrois Gothic SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Carter One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Caudex",variants:["regular","italic","700","700italic"],subsets:["greek-ext","latin-ext","latin","greek"]},{kind:"webfonts#webfont",family:"Cedarville Cursive",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Ceviche One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Changa One",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Chango",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Chau Philomene One",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Chela One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Chelsea Market",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Chenla",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Cherry Cream Soda",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cherry Swash",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Chewy",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Chicle",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Chivo",variants:["regular","italic","900","900italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cinzel",variants:["regular","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cinzel Decorative",variants:["regular","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Clicker Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Coda",variants:["regular","800"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Coda 
Caption",variants:["800"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Codystar",variants:["300","regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Combo",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Comfortaa",variants:["300","regular","700"],subsets:["cyrillic","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Coming Soon",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Concert One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Condiment",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Content",variants:["regular","700"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Contrail One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Convergence",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cookie",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Copse",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Corben",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Courgette",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Cousine",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Coustard",variants:["regular","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Covered By Your Grace",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Crafty Girls",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Creepster",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Crete Round",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Crimson Text",variants:["regular","italic","600","600italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Croissant One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Crushed",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Cuprum",variants:["regular","italic","700","700italic"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Cutive",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Cutive Mono",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Damion",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Dancing Script",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Dangrek",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Dawning of a New Day",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Days One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Delius",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Delius Swash Caps",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Delius Unicase",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Della Respira",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Devonshire",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Didact 
Gothic",variants:["regular"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Diplomata",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Diplomata SC",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Doppio One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Dorsa",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Dosis",variants:["200","300","regular","500","600","700","800"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Dr Sugiyama",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Droid Sans",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Droid Sans Mono",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Droid Serif",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Duru Sans",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Dynalight",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"EB Garamond",variants:["regular"],subsets:["cyrillic","latin-ext","latin","vietnamese","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Eagle Lake",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Eater",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Economica",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Electrolize",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Emblema One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Emilys Candy",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Engagement",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Englebert",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Enriqueta",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Erica One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Esteban",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Euphoria Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ewert",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Exo",variants:["100","100italic","200","200italic","300","300italic","regular","italic","500","500italic","600","600italic","700","700italic","800","800italic","900","900italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Expletus Sans",variants:["regular","italic","500","500italic","600","600italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fanwood Text",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fascinate",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fascinate Inline",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Faster 
One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fasthand",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Federant",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Federo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Felipa",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Fenix",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Finger Paint",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fjord One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Flamenco",variants:["300","regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Flavors",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fondamento",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Fontdiner Swanky",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Forum",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Francois One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Freckle Face",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Fredericka the Great",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fredoka One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Freehand",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Fresca",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Frijole",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Fugaz One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"GFS Didot",variants:["regular"],subsets:["greek"]},{kind:"webfonts#webfont",family:"GFS Neohellenic",variants:["regular","italic","700","700italic"],subsets:["greek"]},{kind:"webfonts#webfont",family:"Gafata",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Galdeano",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Galindo",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Gentium Basic",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Gentium Book Basic",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Geo",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Geostar",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Geostar Fill",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Germania One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Gilda Display",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Give You Glory",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Glass Antiqua",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Glegoo",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Gloria Hallelujah",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Goblin One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Gochi 
Hand",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Gorditas",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Goudy Bookletter 1911",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Graduate",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Gravitas One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Great Vibes",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Griffy",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Gruppo",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Gudea",variants:["regular","italic","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Habibi",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Hammersmith One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Hanalei",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Hanalei Fill",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Handlee",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Hanuman",variants:["regular","700"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Happy Monkey",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Headland One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Henny Penny",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Herr Von Muellerhoff",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Holtwood One SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Homemade Apple",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Homenaje",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"IM Fell DW Pica",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell DW Pica SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell Double Pica",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell Double Pica SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell English",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell English SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell French Canon",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell French Canon SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell Great Primer",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"IM Fell Great Primer SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Iceberg",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Iceland",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Imprima",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Inconsolata",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Inder",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Indie 
Flower",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Inika",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Irish Grover",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Istok Web",variants:["regular","italic","700","700italic"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Italiana",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Italianno",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Jacques Francois",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Jacques Francois Shadow",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Jim Nightshade",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Jockey One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Jolly Lodger",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Josefin Sans",variants:["100","100italic","300","300italic","regular","italic","600","600italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Josefin Slab",variants:["100","100italic","300","300italic","regular","italic","600","600italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Joti One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Judson",variants:["regular","italic","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Julee",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Julius Sans One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Junge",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Jura",variants:["300","regular","500","600"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Just Another Hand",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Just Me Again Down Here",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Kameron",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Karla",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Kaushan Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Keania One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Kelly Slab",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Kenia",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Khmer",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Kite One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Knewave",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Kotta One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Koulen",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Kranky",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Kreon",variants:["300","regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Kristi",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Krona 
One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"La Belle Aurore",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lancelot",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lato",variants:["100","100italic","300","300italic","regular","italic","700","700italic","900","900italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"League Script",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Leckerli One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Ledger",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Lekton",variants:["regular","italic","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Lemon",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Life Savers",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Lilita One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Limelight",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Linden Hill",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lobster",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Lobster Two",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Londrina Outline",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Londrina Shadow",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Londrina Sketch",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Londrina Solid",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lora",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Love Ya Like A Sister",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Loved by the King",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lovers Quarrel",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Luckiest Guy",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lusitana",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Lustria",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Macondo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Macondo Swash Caps",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Magra",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Maiden Orange",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Mako",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Marcellus",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Marcellus SC",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Marck Script",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Margarine",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Marko 
One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Marmelad",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Marvel",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Mate",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Mate SC",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Maven Pro",variants:["regular","500","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"McLaren",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Meddon",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"MedievalSharp",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Medula One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Megrim",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Meie Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Merienda",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Merienda One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Merriweather",variants:["300","regular","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Metal",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Metal Mania",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Metamorphous",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Metrophobic",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Michroma",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Miltonian",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Miltonian Tattoo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Miniver",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Miss Fajardose",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Modern Antiqua",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Molengo",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Molle",variants:["italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Monofett",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Monoton",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Monsieur La Doulaise",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Montaga",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Montez",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Montserrat",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Montserrat Alternates",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Montserrat Subrayada",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Moul",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Moulpali",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Mountains of Christmas",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Mouse 
Memoirs",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Mr Bedfort",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Mr Dafoe",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Mr De Haviland",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Mrs Saint Delafield",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Mrs Sheppards",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Muli",variants:["300","300italic","regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Mystery Quest",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Neucha",variants:["regular"],subsets:["cyrillic","latin"]},{kind:"webfonts#webfont",family:"Neuton",variants:["200","300","regular","italic","700","800"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"News Cycle",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Niconne",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Nixie One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nobile",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nokora",variants:["regular","700"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Norican",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Nosifer",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Nothing You Could Do",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Noticia Text",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin","vietnamese"]},{kind:"webfonts#webfont",family:"Nova Cut",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nova Flat",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nova Mono",variants:["regular"],subsets:["latin","greek"]},{kind:"webfonts#webfont",family:"Nova Oval",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nova Round",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nova Script",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nova Slim",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nova Square",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Numans",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Nunito",variants:["300","regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Odor Mean Chey",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Offside",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Old Standard TT",variants:["regular","italic","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Oldenburg",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Oleo Script",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Oleo Script Swash Caps",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Open 
Sans",variants:["300","300italic","regular","italic","600","600italic","700","700italic","800","800italic"],subsets:["cyrillic","greek-ext","latin-ext","latin","vietnamese","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Open Sans Condensed",variants:["300","300italic","700"],subsets:["cyrillic","greek-ext","latin-ext","latin","vietnamese","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Oranienbaum",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Orbitron",variants:["regular","500","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Oregano",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Orienta",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Original Surfer",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Oswald",variants:["300","regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Over the Rainbow",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Overlock",variants:["regular","italic","700","700italic","900","900italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Overlock SC",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ovo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Oxygen",variants:["300","regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Oxygen Mono",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"PT Mono",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"PT Sans",variants:["regular","italic","700","700italic"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"PT Sans Caption",variants:["regular","700"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"PT Sans Narrow",variants:["regular","700"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"PT Serif",variants:["regular","italic","700","700italic"],subsets:["cyrillic","latin"]},{kind:"webfonts#webfont",family:"PT Serif Caption",variants:["regular","italic"],subsets:["cyrillic","latin"]},{kind:"webfonts#webfont",family:"Pacifico",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Paprika",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Parisienne",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Passero One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Passion One",variants:["regular","700","900"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Patrick Hand",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Patua One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Paytone One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Peralta",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Permanent Marker",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Petit Formal 
Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Petrona",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Philosopher",variants:["regular","italic","700","700italic"],subsets:["cyrillic","latin"]},{kind:"webfonts#webfont",family:"Piedra",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Pinyon Script",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Pirata One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Plaster",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Play",variants:["regular","700"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Playball",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Playfair Display",variants:["regular","italic","700","700italic","900","900italic"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Playfair Display SC",variants:["regular","italic","700","700italic","900","900italic"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Podkova",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Poiret One",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Poller One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Poly",variants:["regular","italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Pompiere",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Pontano Sans",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Port Lligat Sans",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Port Lligat Slab",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Prata",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Preahvihear",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Press Start 2P",variants:["regular"],subsets:["cyrillic","latin-ext","latin","greek"]},{kind:"webfonts#webfont",family:"Princess Sofia",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Prociono",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Prosto One",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Puritan",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Purple Purse",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Quando",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Quantico",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Quattrocento",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Quattrocento 
Sans",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Questrial",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Quicksand",variants:["300","regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Quintessential",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Qwigley",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Racing Sans One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Radley",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Raleway",variants:["100","200","300","regular","500","600","700","800","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Raleway Dots",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rambla",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rammetto One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ranchers",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rancho",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Rationale",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Redressed",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Reenie Beanie",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Revalia",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ribeye",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ribeye Marrow",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Righteous",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Risque",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rochester",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Rock Salt",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Rokkitt",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Romanesco",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ropa Sans",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rosario",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Rosarivo",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rouge Script",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Ruda",variants:["regular","700","900"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rufina",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ruge Boogie",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ruluko",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rum Raisin",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ruslan Display",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Russo 
One",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Ruthie",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Rye",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sacramento",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sail",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Salsa",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sanchez",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sancreek",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sansita One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sarina",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Satisfy",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Scada",variants:["regular","italic","700","700italic"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Schoolbell",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Seaweed Script",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sevillana",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Seymour One",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Shadows Into Light",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Shadows Into Light Two",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Shanti",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Share",variants:["regular","italic","700","700italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Share Tech",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Share Tech Mono",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Shojumaru",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Short Stack",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Siemreap",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Sigmar One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Signika",variants:["300","regular","600","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Signika Negative",variants:["300","regular","600","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Simonetta",variants:["regular","italic","900","900italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sirin Stencil",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Six Caps",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Skranji",variants:["regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Slackey",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Smokum",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Smythe",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sniglet",variants:["800"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Snippet",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Snowburst 
One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sofadi One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sofia",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sonsie One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Sorts Mill Goudy",variants:["regular","italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Source Code Pro",variants:["200","300","regular","600","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Source Sans Pro",variants:["200","200italic","300","300italic","regular","italic","600","600italic","700","700italic","900","900italic"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Special Elite",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Spicy Rice",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Spinnaker",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Spirax",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Squada One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Stalemate",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Stalinist One",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Stardos Stencil",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Stint Ultra Condensed",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Stint Ultra Expanded",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Stoke",variants:["300","regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Strait",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sue Ellen Francisco",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Sunshiney",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Supermercado One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Suwannaphum",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Swanky and Moo Moo",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Syncopate",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Tangerine",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Taprom",variants:["regular"],subsets:["khmer"]},{kind:"webfonts#webfont",family:"Telex",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Tenor Sans",variants:["regular"],subsets:["cyrillic","latin-ext","latin","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Text Me One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"The Girl Next Door",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Tienne",variants:["regular","700","900"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Tinos",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Titan One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Titillium Web",variants:["200","200italic","300","300italic","regular","italic","600","600italic","700","700italic","900"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Trade 
Winds",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Trocchi",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Trochut",variants:["regular","italic","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Trykker",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Tulpen One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Ubuntu",variants:["300","300italic","regular","italic","500","500italic","700","700italic"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Ubuntu Condensed",variants:["regular"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Ubuntu Mono",variants:["regular","italic","700","700italic"],subsets:["cyrillic","greek-ext","latin-ext","latin","greek","cyrillic-ext"]},{kind:"webfonts#webfont",family:"Ultra",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Uncial Antiqua",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Underdog",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Unica One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"UnifrakturCook",variants:["700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"UnifrakturMaguntia",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Unkempt",variants:["regular","700"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Unlock",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Unna",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"VT323",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Vampiro One",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Varela",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Varela Round",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Vast Shadow",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Vibur",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Vidaloka",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Viga",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Voces",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Volkhov",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Vollkorn",variants:["regular","italic","700","700italic"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Voltaire",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Waiting for the Sunrise",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Wallpoet",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Walter Turncoat",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Warnes",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Wellfleet",variants:["regular"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Wire One",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Yanone 
Kaffeesatz",variants:["200","300","regular","700"],subsets:["latin-ext","latin"]},{kind:"webfonts#webfont",family:"Yellowtail",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Yeseva One",variants:["regular"],subsets:["cyrillic","latin-ext","latin"]},{kind:"webfonts#webfont",family:"Yesteryear",variants:["regular"],subsets:["latin"]},{kind:"webfonts#webfont",family:"Zeyada",variants:["regular"],subsets:["latin"]}]},BFHLanguagesList={om:"Afaan Oromoo",aa:"Afaraf",af:"Afrikaans",ak:"Akan",an:"aragonés",ig:"Asụsụ Igbo",gn:"Avañe'ẽ",ae:"avesta",ay:"aymar aru",az:"azərbaycan dili",id:"Bahasa Indonesia",ms:"bahasa Melayu",bm:"bamanankan",jv:"basa Jawa",su:"Basa Sunda",bi:"Bislama",bs:"bosanski jezik",br:"brezhoneg",ca:"català",ch:"Chamoru",ny:"chiCheŵa",sn:"chiShona",co:"corsu",cy:"Cymraeg",da:"dansk",se:"Davvisámegiella",de:"Deutsch",nv:"Diné bizaad",et:"eesti",na:"Ekakairũ Naoero",en:"English",es:"español",eo:"Esperanto",eu:"euskara",ee:"Eʋegbe",to:"faka Tonga",mg:"fiteny malagasy",fr:"français",fy:"Frysk",ff:"Fulfulde",fo:"føroyskt",ga:"Gaeilge",gv:"Gaelg",sm:"gagana fa'a Samoa",gl:"galego",sq:"gjuha shqipe",gd:"Gàidhlig",ki:"Gĩkũyũ",ha:"Hausa",ho:"Hiri Motu",hr:"hrvatski jezik",io:"Ido",rw:"Ikinyarwanda",rn:"Ikirundi",ia:"Interlingua",nd:"isiNdebele",nr:"isiNdebele",xh:"isiXhosa",zu:"isiZulu",it:"italiano",ik:"Iñupiaq",pl:"polski",mh:"Kajin M̧ajeļ",kl:"kalaallisut",kr:"Kanuri",kw:"Kernewek",kg:"KiKongo",sw:"Kiswahili",ht:"Kreyòl ayisyen",kj:"Kuanyama",ku:"Kurdî",la:"latine",lv:"latviešu valoda",lt:"lietuvių kalba",ro:"limba română",li:"Limburgs",ln:"Lingála",lg:"Luganda",lb:"Lëtzebuergesch",hu:"magyar",mt:"Malti",nl:"Nederlands",no:"Norsk",nb:"Norsk bokmål",nn:"Norsk nynorsk",uz:"O'zbek",oc:"occitan",ie:"Interlingue",hz:"Otjiherero",ng:"Owambo",pt:"português",ty:"Reo Tahiti",rm:"rumantsch grischun",qu:"Runa Simi",sc:"sardu",za:"Saɯ cueŋƅ",st:"Sesotho",tn:"Setswana",ss:"SiSwati",sl:"slovenski jezik",sk:"slovenčina",so:"Soomaaliga",fi:"suomi",sv:"Svenska",mi:"te reo Māori",vi:"Tiếng Việt",lu:"Tshiluba",ve:"Tshivenḓa",tw:"Twi",tk:"Türkmen",tr:"Türkçe",ug:"Uyƣurqə",vo:"Volapük",fj:"vosa Vakaviti",wa:"walon",tl:"Wikang Tagalog",wo:"Wollof",ts:"Xitsonga",yo:"Yorùbá",sg:"yângâ tî sängö",is:"Íslenska",cs:"čeština",el:"ελληνικά",av:"авар мацӀ",ab:"аҧсуа бызшәа",ba:"башҡорт теле",be:"беларуская мова",bg:"български език",os:"ирон æвзаг",kv:"коми кыв",ky:"Кыргызча",mk:"македонски јазик",mn:"монгол",ce:"нохчийн мотт",ru:"русский язык",sr:"српски језик",tt:"татар теле",tg:"тоҷикӣ",uk:"українська мова",cv:"чӑваш чӗлхи",cu:"ѩзыкъ словѣньскъ",kk:"қазақ тілі",hy:"Հայերեն",yi:"ייִדיש",he:"עברית",ur:"اردو",ar:"العربية",fa:"فارسی",ps:"پښتو",ks:"कश्मीरी",ne:"नेपाली",pi:"पाऴि",bh:"भोजपुरी",mr:"मराठी",sa:"संस्कृतम्",sd:"सिन्धी",hi:"हिन्दी",as:"অসমীয়া",bn:"বাংলা",pa:"ਪੰਜਾਬੀ",gu:"ગુજરાતી",or:"ଓଡ଼ିଆ",ta:"தமிழ்",te:"తెలుగు",kn:"ಕನ್ನಡ",ml:"മലയാളം",si:"සිංහල",th:"ไทย",lo:"ພາສາລາວ",bo:"བོད་ཡིག",dz:"རྫོང་ཁ",my:"ဗမာစာ",ka:"ქართული",ti:"ትግርኛ",am:"አማርኛ",iu:"ᐃᓄᒃᑎᑐᑦ",oj:"ᐊᓂᔑᓈᐯᒧᐎᓐ",cr:"ᓀᐦᐃᔭᐍᐏᐣ",km:"ខ្មែរ",zh:"中文 (Zhōngwén)",ja:"日本語 (にほんご)",ii:"ꆈꌠ꒿ Nuosuhxop",ko:"한국어 (韓國語)"},BFHPhoneFormatList={AF:"+93 0dd ddd dddd",AL:"+355 0dd ddd ddd",DZ:"+213 0ddd dd dd dd",AS:"+1 (ddd) ddd-dddd",AD:"+376 ddddddddd",AO:"+244 ddd ddd ddd",AI:"+1 (ddd) ddd-dddd",AQ:"+672 ddddddddd",AG:"+1 (ddd) ddd-dddd",AR:"+54 ddddddddd",AM:"+374 0dd dddddd",AW:"+297 ddd dddd",AU:"+61 ddd ddd ddd",AT:"+43 0dddd ddddddddd",AZ:"+994 ddddddddd",BH:"+973 ddddddddd",BD:"+880 ddddddddd",BB:"+1 ddddddddd",BY:"+375 ddddddddd",BE:"+32 
ddddddddd",BZ:"+501 ddddddddd",BJ:"+229 ddddddddd",BM:"+1 (ddd) ddd-dddd",BT:"+975 ddddddddd",BO:"+591 ddddddddd",BA:"+387 ddddddddd",BW:"+267 ddddddddd",BV:"+0 ddddddddd",BR:"+55 ddddddddd",IO:"+0 ddddddddd",VG:"+1 (ddd) ddd-dddd",BN:"+673 ddddddddd",BG:"+359 ddddddddd",BF:"+226 ddddddddd",BI:"+257 ddddddddd",CI:"+225 ddddddddd",KH:"+855 ddddddddd",CM:"+237 ddddddddd",CA:"+1 (ddd) ddd-dddd",CV:"+238 ddddddddd",KY:"+1 (ddd) ddd-dddd",CF:"+236 ddddddddd",TD:"+235 ddddddddd",CL:"+56 ddddddddd",CN:"+86 ddddddddd",CX:"+61 ddddddddd",CC:"+61 ddddddddd",CO:"+57 ddddddddd",KM:"+269 ddddddddd",CG:"+242 ddddddddd",CK:"+682 ddddddddd",CR:"+506 ddddddddd",HR:"+385 ddddddddd",CU:"+53 ddddddddd",CY:"+357 ddddddddd",CZ:"+420 ddddddddd",CD:"+243 ddddddddd",DK:"+45 ddddddddd",DJ:"+253 ddddddddd",DM:"+1 (ddd) ddd-dddd",DO:"+1 (ddd) ddd-dddd",TL:"+670 ddddddddd",EC:"+593 ddddddddd",EG:"+20 ddddddddd",SV:"+503 ddddddddd",GQ:"+240 ddddddddd",ER:"+291 ddddddddd",EE:"+372 ddddddddd",ET:"+251 ddddddddd",FO:"+298 ddddddddd",FK:"+500 ddddddddd",FJ:"+679 ddddddddd",FI:"+358 ddddddddd",MK:"+389 ddddddddd",FR:"+33 d dd dd dd dd",GF:"+594 ddddddddd",PF:"+689 ddddddddd",TF:"+262 ddddddddd",GA:"+241 ddddddddd",GE:"+995 ddddddddd",DE:"+49 ddddddddd",GH:"+233 ddddddddd",GI:"+350 ddddddddd",GR:"+30 ddddddddd",GL:"+299 ddddddddd",GD:"+1 (ddd) ddd-dddd",GP:"+590 ddddddddd",GU:"+1 (ddd) ddd-dddd",GT:"+502 ddddddddd",GN:"+224 ddddddddd",GW:"+245 ddddddddd",GY:"+592 ddddddddd",HT:"+509 ddddddddd",HM:"+0 ddddddddd",HN:"+504 ddddddddd",HK:"+852 ddddddddd",HU:"+36 ddddddddd",IS:"+354 ddddddddd",IN:"+91 ddddddddd",ID:"+62 ddddddddd",IR:"+98 ddddddddd",IQ:"+964 ddddddddd",IE:"+353 ddddddddd",IL:"+972 ddddddddd",IT:"+39 ddddddddd",JM:"+1 (ddd) ddd-dddd",JP:"+81 ddddddddd",JO:"+962 ddddddddd",KZ:"+7 ddddddddd",KE:"+254 ddddddddd",KI:"+686 ddddddddd",KW:"+965 ddddddddd",KG:"+996 ddddddddd",LA:"+856 ddddddddd",LV:"+371 ddddddddd",LB:"+961 ddddddddd",LS:"+266 ddddddddd",LR:"+231 ddddddddd",LY:"+218 ddddddddd",LI:"+423 ddddddddd",LT:"+370 ddddddddd",LU:"+352 ddddddddd",MO:"+853 ddddddddd",MG:"+261 ddddddddd",MW:"+265 ddddddddd",MY:"+60 ddddddddd",MV:"+960 ddddddddd",ML:"+223 ddddddddd",MT:"+356 ddddddddd",MH:"+692 ddddddddd",MQ:"+596 ddddddddd",MR:"+222 ddddddddd",MU:"+230 ddddddddd",YT:"+262 ddddddddd",MX:"+52 ddddddddd",FM:"+691 ddddddddd",MD:"+373 ddddddddd",MC:"+377 ddddddddd",MN:"+976 ddddddddd",MS:"+1 (ddd) ddd-dddd",MA:"+212 ddddddddd",MZ:"+258 ddddddddd",MM:"+95 ddddddddd",NA:"+264 ddddddddd",NR:"+674 ddddddddd",NP:"+977 ddddddddd",NL:"+31 ddddddddd",AN:"+599 ddddddddd",NC:"+687 ddddddddd",NZ:"+64 ddddddddd",NI:"+505 ddddddddd",NE:"+227 ddddddddd",NG:"+234 ddddddddd",NU:"+683 ddddddddd",NF:"+672 ddddddddd",KP:"+850 ddddddddd",MP:"+1 (ddd) ddd-dddd",NO:"+47 ddddddddd",OM:"+968 ddddddddd",PK:"+92 ddddddddd",PW:"+680 ddddddddd",PA:"+507 ddddddddd",PG:"+675 ddddddddd",PY:"+595 ddddddddd",PE:"+51 ddddddddd",PH:"+63 ddddddddd",PN:"+870 ddddddddd",PL:"+48 ddddddddd",PT:"+351 ddddddddd",PR:"+1 (ddd) ddd-dddd",QA:"+974 ddddddddd",RE:"+262 ddddddddd",RO:"+40 ddddddddd",RU:"+7 ddddddddd",RW:"+250 ddddddddd",ST:"+239 ddddddddd",SH:"+290 ddddddddd",KN:"+1 (ddd) ddd-dddd",LC:"+1 (ddd) ddd-dddd",PM:"+508 ddddddddd",VC:"+1 (ddd) ddd-dddd",WS:"+685 ddddddddd",SM:"+378 ddddddddd",SA:"+966 ddddddddd",SN:"+221 ddddddddd",SC:"+248 ddddddddd",SL:"+232 ddddddddd",SG:"+65 ddddddddd",SK:"+421 ddddddddd",SI:"+386 ddddddddd",SB:"+677 ddddddddd",SO:"+252 ddddddddd",ZA:"+27 ddddddddd",GS:"+0 ddddddddd",KR:"+82 ddddddddd",ES:"+34 ddddddddd",LK:"+94 
ddddddddd",SD:"+249 ddddddddd",SR:"+597 ddddddddd",SJ:"+0 ddddddddd",SZ:"+268 ddddddddd",SE:"+46 ddddddddd",CH:"+41 ddddddddd",SY:"+963 ddddddddd",TW:"+886 ddddddddd",TJ:"+992 ddddddddd",TZ:"+255 ddddddddd",TH:"+66 ddddddddd",BS:"+1 (ddd) ddd-dddd",GM:"+220 ddddddddd",TG:"+228 ddddddddd",TK:"+690 ddddddddd",TO:"+676 ddddddddd",TT:"+1 (ddd) ddd-dddd",TN:"+216 ddddddddd",TR:"+90 ddddddddd",TM:"+993 ddddddddd",TC:"+1 (ddd) ddd-dddd",TV:"+688 ddddddddd",VI:"+1 (ddd) ddd-dddd",UG:"+256 ddddddddd",UA:"+380 ddddddddd",AE:"+971 ddddddddd",GB:"+44 (ddd) dddd dddd",US:"+1 (ddd) ddd-dddd",UM:"+0 ddddddddd",UY:"+598 ddddddddd",UZ:"+998 ddddddddd",VU:"+678 ddddddddd",VA:"+39 ddddddddd",VE:"+58 ddddddddd",VN:"+84 ddddddddd",WF:"+681 ddddddddd",EH:"+0 ddddddddd",YE:"+967 ddddddddd",YU:"+0 ddddddddd",ZM:"+260 ddddddddd",ZW:"+263 ddddddddd"},BFHStatesList={AF:{1:{code:"BAL",name:"Balkh"},2:{code:"BAM",name:"Bamian"},3:{code:"BDG",name:"Badghis"},4:{code:"BDS",name:"Badakhshan"},5:{code:"BGL",name:"Baghlan"},6:{code:"FRA",name:"Farah"},7:{code:"FYB",name:"Faryab"},8:{code:"GHA",name:"Ghazni"},9:{code:"GHO",name:"Ghowr"},10:{code:"HEL",name:"Helmand"},11:{code:"HER",name:"Herat"},12:{code:"JOW",name:"Jowzjan"},13:{code:"KAB",name:"Kabul"},14:{code:"KAN",name:"Kandahar"},15:{code:"KAP",name:"Kapisa"},16:{code:"KDZ",name:"Kondoz"},17:{code:"KHO",name:"Khost"},18:{code:"KNR",name:"Konar"},19:{code:"LAG",name:"Laghman"},20:{code:"LOW",name:"Lowgar"},21:{code:"NAN",name:"Nangrahar"},22:{code:"NIM",name:"Nimruz"},23:{code:"NUR",name:"Nurestan"},24:{code:"ORU",name:"Oruzgan"},25:{code:"PAR",name:"Parwan"},26:{code:"PIA",name:"Paktia"},27:{code:"PKA",name:"Paktika"},28:{code:"SAM",name:"Samangan"},29:{code:"SAR",name:"Sar-e Pol"},30:{code:"TAK",name:"Takhar"},31:{code:"WAR",name:"Wardak"},32:{code:"ZAB",name:"Zabol"}},AL:{1:{code:"BR",name:"Berat"},2:{code:"BU",name:"Bulqize"},3:{code:"DI",name:"Diber"},4:{code:"DL",name:"Delvine"},5:{code:"DR",name:"Durres"},6:{code:"DV",name:"Devoll"},7:{code:"EL",name:"Elbasan"},8:{code:"ER",name:"Kolonje"},9:{code:"FR",name:"Fier"},10:{code:"GJ",name:"Gjirokaster"},11:{code:"GR",name:"Gramsh"},12:{code:"HA",name:"Has"},13:{code:"KA",name:"Kavaje"},14:{code:"KB",name:"Kurbin"},15:{code:"KC",name:"Kucove"},16:{code:"KO",name:"Korce"},17:{code:"KR",name:"Kruje"},18:{code:"KU",name:"Kukes"},19:{code:"LB",name:"Librazhd"},20:{code:"LE",name:"Lezhe"},21:{code:"LU",name:"Lushnje"},22:{code:"MK",name:"Mallakaster"},23:{code:"MM",name:"Malesi e Madhe"},24:{code:"MR",name:"Mirdite"},25:{code:"MT",name:"Mat"},26:{code:"PG",name:"Pogradec"},27:{code:"PQ",name:"Peqin"},28:{code:"PR",name:"Permet"},29:{code:"PU",name:"Puke"},30:{code:"SH",name:"Shkoder"},31:{code:"SK",name:"Skrapar"},32:{code:"SR",name:"Sarande"},33:{code:"TE",name:"Tepelene"},34:{code:"TP",name:"Tropoje"},35:{code:"TR",name:"Tirane"},36:{code:"VL",name:"Vlore"}},DZ:{1:{code:"ADE",name:"Ain Defla"},2:{code:"ADR",name:"Adrar"},3:{code:"ALG",name:"Alger"},4:{code:"ANN",name:"Annaba"},5:{code:"ATE",name:"Ain Temouchent"},6:{code:"BAT",name:"Batna"},7:{code:"BBA",name:"Bordj Bou Arreridj"},8:{code:"BEC",name:"Bechar"},9:{code:"BEJ",name:"Bejaia"},10:{code:"BIS",name:"Biskra"},11:{code:"BLI",name:"Blida"},12:{code:"BMD",name:"Boumerdes"},13:{code:"BOA",name:"Bouira"},14:{code:"CHL",name:"Chlef"},15:{code:"CON",name:"Constantine"},16:{code:"DJE",name:"Djelfa"},17:{code:"EBA",name:"El Bayadh"},18:{code:"EOU",name:"El Oued"},19:{code:"ETA",name:"El 
Tarf"},20:{code:"GHA",name:"Ghardaia"},21:{code:"GUE",name:"Guelma"},22:{code:"ILL",name:"Illizi"},23:{code:"JIJ",name:"Jijel"},24:{code:"KHE",name:"Khenchela"},25:{code:"LAG",name:"Laghouat"},26:{code:"MED",name:"Medea"},27:{code:"MIL",name:"Mila"},28:{code:"MOS",name:"Mostaganem"},29:{code:"MSI",name:"M'Sila"},30:{code:"MUA",name:"Muaskar"},31:{code:"NAA",name:"Naama"},32:{code:"OEB",name:"Oum el-Bouaghi"},33:{code:"ORA",name:"Oran"},34:{code:"OUA",name:"Ouargla"},35:{code:"REL",name:"Relizane"},36:{code:"SAH",name:"Souk Ahras"},37:{code:"SAI",name:"Saida"},38:{code:"SBA",name:"Sidi Bel Abbes"},39:{code:"SET",name:"Setif"},40:{code:"SKI",name:"Skikda"},41:{code:"TAM",name:"Tamanghasset"},42:{code:"TEB",name:"Tebessa"},43:{code:"TIA",name:"Tiaret"},44:{code:"TIN",name:"Tindouf"},45:{code:"TIP",name:"Tipaza"},46:{code:"TIS",name:"Tissemsilt"},47:{code:"TLE",name:"Tlemcen"},48:{code:"TOU",name:"Tizi Ouzou"}},AS:{1:{code:"E",name:"Eastern"},2:{code:"M",name:"Manu'a"},3:{code:"R",name:"Rose Island"},4:{code:"S",name:"Swains Island"},5:{code:"W",name:"Western"}},AD:{1:{code:"ALV",name:"Andorra la Vella"},2:{code:"CAN",name:"Canillo"},3:{code:"ENC",name:"Encamp"},4:{code:"ESE",name:"Escaldes-Engordany"},5:{code:"LMA",name:"La Massana"},6:{code:"ORD",name:"Ordino"},7:{code:"SJL",name:"Sant Julià de Lòria"}},AO:{1:{code:"BGO",name:"Bengo"},2:{code:"BGU",name:"Benguela"},3:{code:"BIE",name:"Bie"},4:{code:"CAB",name:"Cabinda"},5:{code:"CCU",name:"Cuando-Cubango"},6:{code:"CNO",name:"Cuanza Norte"},7:{code:"CUS",name:"Cuanza Sul"},8:{code:"CNN",name:"Cunene"},9:{code:"HUA",name:"Huambo"},10:{code:"HUI",name:"Huila"},11:{code:"LUA",name:"Luanda"},12:{code:"LNO",name:"Lunda Norte"},13:{code:"LSU",name:"Lunda Sul"},14:{code:"MAL",name:"Malange"},15:{code:"MOX",name:"Moxico"},16:{code:"NAM",name:"Namibe"},17:{code:"UIG",name:"Uige"},18:{code:"ZAI",name:"Zaire"}},AI:{1:{code:"ANG",name:"Anguillita"},2:{code:"ANG",name:"Anguila"},3:{code:"DOG",name:"Dog"},4:{code:"LIT",name:"Little Scrub"},5:{code:"PRI",name:"Prickly Pear"},6:{code:"SAN",name:"Sandy"},7:{code:"SCR",name:"Scrub"},8:{code:"SEA",name:"Seal"},9:{code:"SOM",name:"Sombrero"}},AQ:{1:{code:"ASG",name:"Saint George"},2:{code:"ASH",name:"Saint Philip"},3:{code:"ASJ",name:"Saint John"},4:{code:"ASL",name:"Saint Paul"},5:{code:"ASM",name:"Saint Mary"},6:{code:"ASR",name:"Saint Peter"},7:{code:"BAR",name:"Barbuda"},8:{code:"RED",name:"Redonda"}},AR:{1:{code:"AN",name:"Antartida e Islas del Atlantico"},2:{code:"BA",name:"Buenos Aires"},3:{code:"CA",name:"Catamarca"},4:{code:"CH",name:"Chaco"},5:{code:"CU",name:"Chubut"},6:{code:"CO",name:"Cordoba"},7:{code:"CR",name:"Corrientes"},8:{code:"CF",name:"Capital Federal"},9:{code:"ER",name:"Entre Rios"},10:{code:"FO",name:"Formosa"},11:{code:"JU",name:"Jujuy"},12:{code:"LP",name:"La Pampa"},13:{code:"LR",name:"La Rioja"},14:{code:"ME",name:"Mendoza"},15:{code:"MI",name:"Misiones"},16:{code:"NE",name:"Neuquen"},17:{code:"RN",name:"Rio Negro"},18:{code:"SA",name:"Salta"},19:{code:"SJ",name:"San Juan"},20:{code:"SL",name:"San Luis"},21:{code:"SC",name:"Santa Cruz"},22:{code:"SF",name:"Santa Fe"},23:{code:"SD",name:"Santiago del Estero"},24:{code:"TF",name:"Tierra del Fuego"},25:{code:"TU",name:"Tucuman"}},AM:{1:{code:"AGT",name:"Aragatsotn"},2:{code:"ARR",name:"Ararat"},3:{code:"ARM",name:"Armavir"},4:{code:"GEG",name:"Geghark 'unik'"},5:{code:"KOT",name:"Kotayk'"},6:{code:"LOR",name:"Lorri"},7:{code:"SHI",name:"Shirak"},8:{code:"SYU",name:"Syunik'"},9:{code:"TAV",name:"Tavush"},10:{code:"VAY",name:"Vayots' 
Dzor"},11:{code:"YER",name:"Yerevan"}},AW:{1:{code:"ARU",name:"Aruba"},2:{code:"DRU",name:"Druif Beach"},3:{code:"MAN",name:"Manchebo Beach"},4:{code:"NOO",name:"Noord"},5:{code:"ORA",name:"Oranjestad"},6:{code:"PAL",name:"Palm Beach"},7:{code:"ROO",name:"Rooi Thomas"},8:{code:"SIN",name:"Sint Nicolaas"},9:{code:"SIN",name:"Sint Nicolas"},10:{code:"WAY",name:"Wayaca"}},AU:{1:{code:"ACT",name:"Australian Capital Territory"},2:{code:"NSW",name:"New South Wales"},3:{code:"NT",name:"Northern Territory"},4:{code:"QLD",name:"Queensland"},5:{code:"SA",name:"South Australia"},6:{code:"TAS",name:"Tasmania"},7:{code:"VIC",name:"Victoria"},8:{code:"WA",name:"Western Australia"}},AT:{1:{code:"BUR",name:"Burgenland"},2:{code:"KAR",name:"Krnten"},3:{code:"NOS",name:"Niederöesterreich"},4:{code:"OOS",name:"Oberöesterreich"},5:{code:"SAL",name:"Salzburg"},6:{code:"STE",name:"Steiermark"},7:{code:"TIR",name:"Tirol"},8:{code:"VOR",name:"Vorarlberg"},9:{code:"WIE",name:"Wien"}},AZ:{1:{code:"AB",name:"Ali Bayramli"},2:{code:"ABS",name:"Abseron"},3:{code:"AGC",name:"AgcabAdi"},4:{code:"AGM",name:"Agdam"},5:{code:"AGS",name:"Agdas"},6:{code:"AGA",name:"Agstafa"},7:{code:"AGU",name:"Agsu"},8:{code:"AST",name:"Astara"},9:{code:"BA",name:"Baki"},10:{code:"BAB",name:"BabAk"},11:{code:"BAL",name:"BalakAn"},12:{code:"BAR",name:"BArdA"},13:{code:"BEY",name:"Beylaqan"},14:{code:"BIL",name:"Bilasuvar"},15:{code:"CAB",name:"Cabrayil"},16:{code:"CAL",name:"Calilabab"},17:{code:"CUL",name:"Culfa"},18:{code:"DAS",name:"Daskasan"},19:{code:"DAV",name:"Davaci"},20:{code:"FUZ",name:"Fuzuli"},21:{code:"GA",name:"Ganca"},22:{code:"GAD",name:"Gadabay"},23:{code:"GOR",name:"Goranboy"},24:{code:"GOY",name:"Goycay"},25:{code:"HAC",name:"Haciqabul"},26:{code:"IMI",name:"Imisli"},27:{code:"ISM",name:"Ismayilli"},28:{code:"KAL",name:"Kalbacar"},29:{code:"KUR",name:"Kurdamir"},30:{code:"LA",name:"Lankaran"},31:{code:"LAC",name:"Lacin"},32:{code:"LAN",name:"Lankaran"},33:{code:"LER",name:"Lerik"},34:{code:"MAS",name:"Masalli"},35:{code:"MI",name:"Mingacevir"},36:{code:"NA",name:"Naftalan"},37:{code:"NX",name:"Naxcivan"},38:{code:"NEF",name:"Neftcala"},39:{code:"OGU",name:"Oguz"},40:{code:"ORD",name:"Ordubad"},41:{code:"QAB",name:"Qabala"},42:{code:"QAX",name:"Qax"},43:{code:"QAZ",name:"Qazax"},44:{code:"QOB",name:"Qobustan"},45:{code:"QBA",name:"Quba"},46:{code:"QBI",name:"Qubadli"},47:{code:"QUS",name:"Qusar"},48:{code:"SA",name:"Saki"},49:{code:"SAT",name:"Saatli"},50:{code:"SAB",name:"Sabirabad"},51:{code:"SAD",name:"Sadarak"},52:{code:"SAH",name:"Sahbuz"},53:{code:"SAK",name:"Saki"},54:{code:"SAL",name:"Salyan"},55:{code:"SM",name:"Sumqayit"},56:{code:"SMI",name:"Samaxi"},57:{code:"SKR",name:"Samkir"},58:{code:"SMX",name:"Samux"},59:{code:"SAR",name:"Sarur"},60:{code:"SIY",name:"Siyazan"},61:{code:"SS",name:"Susa"},62:{code:"SUS",name:"Susa"},63:{code:"TAR",name:"Tartar"},64:{code:"TOV",name:"Tovuz"},65:{code:"UCA",name:"Ucar"},66:{code:"XA",name:"Xankandi"},67:{code:"XAC",name:"Xacmaz"},68:{code:"XAN",name:"Xanlar"},69:{code:"XIZ",name:"Xizi"},70:{code:"XCI",name:"Xocali"},71:{code:"XVD",name:"Xocavand"},72:{code:"YAR",name:"Yardimli"},73:{code:"YEV",name:"Yevlax"},74:{code:"ZAN",name:"Zangilan"},75:{code:"ZAQ",name:"Zaqatala"},76:{code:"ZAR",name:"Zardab"}},BS:{1:{code:"ACK",name:"Acklins"},2:{code:"BER",name:"Berry Islands"},3:{code:"BIM",name:"Bimini"},4:{code:"BLK",name:"Black Point"},5:{code:"CAT",name:"Cat Island"},6:{code:"CAB",name:"Central Abaco"},7:{code:"CAN",name:"Central Andros"},8:{code:"CEL",name:"Central 
Eleuthera"},9:{code:"FRE",name:"City of Freeport"},10:{code:"CRO",name:"Crooked Island"},11:{code:"EGB",name:"East Grand Bahama"},12:{code:"EXU",name:"Exuma"},13:{code:"GRD",name:"Grand Cay"},14:{code:"HAR",name:"Harbour Island"},15:{code:"HOP",name:"Hope Town"},16:{code:"INA",name:"Inagua"},17:{code:"LNG",name:"Long Island"},18:{code:"MAN",name:"Mangrove Cay"},19:{code:"MAY",name:"Mayaguana"},20:{code:"MOO",name:"Moore's Island"},21:{code:"NAB",name:"North Abaco"},22:{code:"NAN",name:"North Andros"},23:{code:"NEL",name:"North Eleuthera"},24:{code:"RAG",name:"Ragged Island"},25:{code:"RUM",name:"Rum Cay"},26:{code:"SAL",name:"San Salvador"},27:{code:"SAB",name:"South Abaco"},28:{code:"SAN",name:"South Andros"},29:{code:"SEL",name:"South Eleuthera"},30:{code:"SWE",name:"Spanish Wells"},31:{code:"WGB",name:"West Grand Bahama"}},BH:{1:{code:"CAP",name:"Capital"},2:{code:"CEN",name:"Central"},3:{code:"MUH",name:"Muharraq"},4:{code:"NOR",name:"Northern"},5:{code:"SOU",name:"Southern"}},BD:{1:{code:"BAR",name:"Barisal"},2:{code:"CHI",name:"Chittagong"},3:{code:"DHA",name:"Dhaka"},4:{code:"KHU",name:"Khulna"},5:{code:"RAJ",name:"Rajshahi"},6:{code:"SYL",name:"Sylhet"}},BB:{1:{code:"CC",name:"Christ Church"},2:{code:"AND",name:"Saint Andrew"},3:{code:"GEO",name:"Saint George"},4:{code:"JAM",name:"Saint James"},5:{code:"JOH",name:"Saint John"},6:{code:"JOS",name:"Saint Joseph"},7:{code:"LUC",name:"Saint Lucy"},8:{code:"MIC",name:"Saint Michael"},9:{code:"PET",name:"Saint Peter"},10:{code:"PHI",name:"Saint Philip"},11:{code:"THO",name:"Saint Thomas"}},BY:{1:{code:"BR",name:"Brestskaya (Brest)"},2:{code:"HO",name:"Homyel'skaya (Homyel')"},3:{code:"HM",name:"Horad Minsk"},4:{code:"HR",name:"Hrodzyenskaya (Hrodna)"},5:{code:"MA",name:"Mahilyowskaya (Mahilyow)"},6:{code:"MI",name:"Minskaya"},7:{code:"VI",name:"Vitsyebskaya (Vitsyebsk)"}},BE:{1:{code:"VAN",name:"Antwerpen"},2:{code:"WBR",name:"Brabant Wallon"},3:{code:"WHT",name:"Hainaut"},4:{code:"WLG",name:"Liege"},5:{code:"VLI",name:"Limburg"},6:{code:"WLX",name:"Luxembourg"},7:{code:"WNA",name:"Namur"},8:{code:"VOV",name:"Oost-Vlaanderen"},9:{code:"VBR",name:"Vlaams Brabant"},10:{code:"VWV",name:"West-Vlaanderen"}},BZ:{1:{code:"BZ",name:"Belize"},2:{code:"CY",name:"Cayo"},3:{code:"CR",name:"Corozal"},4:{code:"OW",name:"Orange Walk"},5:{code:"SC",name:"Stann Creek"},6:{code:"TO",name:"Toledo"}},BJ:{1:{code:"AL",name:"Alibori"},2:{code:"AK",name:"Atakora"},3:{code:"AQ",name:"Atlantique"},4:{code:"BO",name:"Borgou"},5:{code:"CO",name:"Collines"},6:{code:"DO",name:"Donga"},7:{code:"KO",name:"Kouffo"},8:{code:"LI",name:"Littoral"},9:{code:"MO",name:"Mono"},10:{code:"OU",name:"Oueme"},11:{code:"PL",name:"Plateau"},12:{code:"ZO",name:"Zou"}},BM:{1:{code:"DS",name:"Devonshire"},2:{code:"HC",name:"Hamilton City"},3:{code:"HA",name:"Hamilton"},4:{code:"PG",name:"Paget"},5:{code:"PB",name:"Pembroke"},6:{code:"GC",name:"Saint George City"},7:{code:"SG",name:"Saint George's"},8:{code:"SA",name:"Sandys"},9:{code:"SM",name:"Smith's"},10:{code:"SH",name:"Southampton"},11:{code:"WA",name:"Warwick"}},BT:{1:{code:"BUM",name:"Bumthang"},2:{code:"CHU",name:"Chukha"},3:{code:"DAG",name:"Dagana"},4:{code:"GAS",name:"Gasa"},5:{code:"HAA",name:"Haa"},6:{code:"LHU",name:"Lhuntse"},7:{code:"MON",name:"Mongar"},8:{code:"PAR",name:"Paro"},9:{code:"PEM",name:"Pemagatshel"},10:{code:"PUN",name:"Punakha"},11:{code:"SJO",name:"Samdrup 
Jongkhar"},12:{code:"SAT",name:"Samtse"},13:{code:"SAR",name:"Sarpang"},14:{code:"THI",name:"Thimphu"},15:{code:"TRG",name:"Trashigang"},16:{code:"TRY",name:"Trashiyangste"},17:{code:"TRO",name:"Trongsa"},18:{code:"TSI",name:"Tsirang"},19:{code:"WPH",name:"Wangdue Phodrang"},20:{code:"ZHE",name:"Zhemgang"}},BO:{1:{code:"BEN",name:"Beni"},2:{code:"CHU",name:"Chuquisaca"},3:{code:"COC",name:"Cochabamba"},4:{code:"LPZ",name:"La Paz"},5:{code:"ORU",name:"Oruro"},6:{code:"PAN",name:"Pando"},7:{code:"POT",name:"Potosi"},8:{code:"SCZ",name:"Santa Cruz"},9:{code:"TAR",name:"Tarija"}},BA:{1:{code:"BRO",name:"Brcko district"},2:{code:"FBP",name:"Bosanskopodrinjski Kanton"},3:{code:"FHN",name:"Hercegovacko-neretvanski Kanton"},4:{code:"FPO",name:"Posavski Kanton"},5:{code:"FSA",name:"Kanton Sarajevo"},6:{code:"FSB",name:"Srednjebosanski Kanton"},7:{code:"FTU",name:"Tuzlanski Kanton"},8:{code:"FUS",name:"Unsko-Sanski Kanton"},9:{code:"FZA",name:"Zapadnobosanska"},10:{code:"FZE",name:"Zenicko-Dobojski Kanton"},11:{code:"FZH",name:"Zapadnohercegovacka Zupanija"},12:{code:"SBI",name:"Bijeljina"},13:{code:"SBL",name:"Banja Luka"},14:{code:"SDO",name:"Doboj"},15:{code:"SFO",name:"Foca"},16:{code:"SSR",name:"Sarajevo-Romanija or Sokolac"},17:{code:"STR",name:"Trebinje"},18:{code:"SVL",name:"Vlasenica"}},BW:{1:{code:"CE",name:"Central"},2:{code:"GH",name:"Ghanzi"},3:{code:"KD",name:"Kgalagadi"},4:{code:"KT",name:"Kgatleng"},5:{code:"KW",name:"Kweneng"},6:{code:"NG",name:"Ngamiland"},7:{code:"NE",name:"North East"},8:{code:"NW",name:"North West"},9:{code:"SE",name:"South East"},10:{code:"SO",name:"Southern"}},BR:{1:{code:"AC",name:"Acre"},2:{code:"AL",name:"Alagoas"},3:{code:"AP",name:"Amapa"},4:{code:"AM",name:"Amazonas"},5:{code:"BA",name:"Bahia"},6:{code:"CE",name:"Ceara"},7:{code:"DF",name:"Distrito Federal"},8:{code:"ES",name:"Espirito Santo"},9:{code:"GO",name:"Goias"},10:{code:"MA",name:"Maranhao"},11:{code:"MT",name:"Mato Grosso"},12:{code:"MS",name:"Mato Grosso do Sul"},13:{code:"MG",name:"Minas Gerais"},14:{code:"PA",name:"Para"},15:{code:"PB",name:"Paraiba"},16:{code:"PR",name:"Parana"},17:{code:"PE",name:"Pernambuco"},18:{code:"PI",name:"Piaui"},19:{code:"RJ",name:"Rio de Janeiro"},20:{code:"RN",name:"Rio Grande do Norte"},21:{code:"RS",name:"Rio Grande do Sul"},22:{code:"RO",name:"Rondonia"},23:{code:"RR",name:"Roraima"},24:{code:"SC",name:"Santa Catarina"},25:{code:"SP",name:"Sao Paulo"},26:{code:"SE",name:"Sergipe"},27:{code:"TO",name:"Tocantins"}},IO:{1:{code:"DG",name:"Diego Garcia"},2:{code:"DI",name:"Danger Island"},3:{code:"EA",name:"Eagle Islands"},4:{code:"EG",name:"Egmont Islands"},5:{code:"NI",name:"Nelsons Island"},6:{code:"PB",name:"Peros Banhos"},7:{code:"SI",name:"Salomon Islands"},8:{code:"TB",name:"Three Brothers"}},BN:{1:{code:"BEL",name:"Belait"},2:{code:"BRM",name:"Brunei and 
Muara"},3:{code:"TEM",name:"Temburong"},4:{code:"TUT",name:"Tutong"}},BG:{1:{code:"BG-01",name:"Blagoevgrad"},2:{code:"BG-02",name:"Burgas"},3:{code:"BG-03",name:"Dobrich"},4:{code:"BG-04",name:"Gabrovo"},5:{code:"BG-05",name:"Haskovo"},6:{code:"BG-06",name:"Kardjali"},7:{code:"BG-07",name:"Kyustendil"},8:{code:"BG-08",name:"Lovech"},9:{code:"BG-09",name:"Montana"},10:{code:"BG-10",name:"Pazardjik"},11:{code:"BG-11",name:"Pernik"},12:{code:"BG-12",name:"Pleven"},13:{code:"BG-13",name:"Plovdiv"},14:{code:"BG-14",name:"Razgrad"},15:{code:"BG-15",name:"Shumen"},16:{code:"BG-16",name:"Silistra"},17:{code:"BG-17",name:"Sliven"},18:{code:"BG-18",name:"Smolyan"},19:{code:"BG-19",name:"Sofia"},20:{code:"BG-20",name:"Sofia - town"},21:{code:"BG-21",name:"Stara Zagora"},22:{code:"BG-22",name:"Targovishte"},23:{code:"BG-23",name:"Varna"},24:{code:"BG-24",name:"Veliko Tarnovo"},25:{code:"BG-25",name:"Vidin"},26:{code:"BG-26",name:"Vratza"},27:{code:"BG-27",name:"Yambol"}},BF:{1:{code:"BAL",name:"Bale"},2:{code:"BAM",name:"Bam"},3:{code:"BAN",name:"Banwa"},4:{code:"BAZ",name:"Bazega"},5:{code:"BOR",name:"Bougouriba"},6:{code:"BLG",name:"Boulgou"},7:{code:"BOK",name:"Boulkiemde"},8:{code:"COM",name:"Comoe"},9:{code:"GAN",name:"Ganzourgou"},10:{code:"GNA",name:"Gnagna"},11:{code:"GOU",name:"Gourma"},12:{code:"HOU",name:"Houet"},13:{code:"IOA",name:"Ioba"},14:{code:"KAD",name:"Kadiogo"},15:{code:"KEN",name:"Kenedougou"},16:{code:"KOD",name:"Komondjari"},17:{code:"KOP",name:"Kompienga"},18:{code:"KOS",name:"Kossi"},19:{code:"KOL",name:"Koulpelogo"},20:{code:"KOT",name:"Kouritenga"},21:{code:"KOW",name:"Kourweogo"},22:{code:"LER",name:"Leraba"},23:{code:"LOR",name:"Loroum"},24:{code:"MOU",name:"Mouhoun"},25:{code:"NAH",name:"Nahouri"},26:{code:"NAM",name:"Namentenga"},27:{code:"NAY",name:"Nayala"},28:{code:"NOU",name:"Noumbiel"},29:{code:"OUB",name:"Oubritenga"},30:{code:"OUD",name:"Oudalan"},31:{code:"PAS",name:"Passore"},32:{code:"PON",name:"Poni"},33:{code:"SAG",name:"Sanguie"},34:{code:"SAM",name:"Sanmatenga"},35:{code:"SEN",name:"Seno"},36:{code:"SIS",name:"Sissili"},37:{code:"SOM",name:"Soum"},38:{code:"SOR",name:"Sourou"},39:{code:"TAP",name:"Tapoa"},40:{code:"TUY",name:"Tuy"},41:{code:"YAG",name:"Yagha"},42:{code:"YAT",name:"Yatenga"},43:{code:"ZIR",name:"Ziro"},44:{code:"ZOD",name:"Zondoma"},45:{code:"ZOW",name:"Zoundweogo"}},BI:{1:{code:"BB",name:"Bubanza"},2:{code:"BJ",name:"Bujumbura"},3:{code:"BR",name:"Bururi"},4:{code:"CA",name:"Cankuzo"},5:{code:"CI",name:"Cibitoke"},6:{code:"GI",name:"Gitega"},7:{code:"KR",name:"Karuzi"},8:{code:"KY",name:"Kayanza"},9:{code:"KI",name:"Kirundo"},10:{code:"MA",name:"Makamba"},11:{code:"MU",name:"Muramvya"},12:{code:"MY",name:"Muyinga"},13:{code:"MW",name:"Mwaro"},14:{code:"NG",name:"Ngozi"},15:{code:"RT",name:"Rutana"},16:{code:"RY",name:"Ruyigi"}},KH:{1:{code:"BA",name:"Battambang"},2:{code:"BM",name:"Banteay Meanchey"},3:{code:"KB",name:"Keb"},4:{code:"KK",name:"Kaoh Kong"},5:{code:"KL",name:"Kandal"},6:{code:"KM",name:"Kampong Cham"},7:{code:"KN",name:"Kampong Chhnang"},8:{code:"KO",name:"Kampong Som"},9:{code:"KP",name:"Kampot"},10:{code:"KR",name:"Kratie"},11:{code:"KT",name:"Kampong Thom"},12:{code:"KU",name:"Kampong Speu"},13:{code:"MK",name:"Mondul Kiri"},14:{code:"OM",name:"Oddar Meancheay"},15:{code:"PA",name:"Pailin"},16:{code:"PG",name:"Prey Veng"},17:{code:"PP",name:"Phnom Penh"},18:{code:"PR",name:"Preah Vihear"},19:{code:"PS",name:"Preah Seihanu (Kompong Som or Si)"},20:{code:"PU",name:"Pursat"},21:{code:"RK",name:"Ratanak 
Kiri"},22:{code:"SI",name:"Siemreap"},23:{code:"SR",name:"Svay Rieng"},24:{code:"ST",name:"Stung Treng"},25:{code:"TK",name:"Takeo"}},CM:{1:{code:"ADA",name:"Adamawa (Adamaoua)"},2:{code:"CEN",name:"Centre"},3:{code:"EST",name:"East (Est)"},4:{code:"EXN",name:"Extrême-Nord"},5:{code:"LIT",name:"Littoral"},6:{code:"NOR",name:"North (Nord)"},7:{code:"NOT",name:"Northwest (Nord-Ouest)"},8:{code:"OUE",name:"West (Ouest)"},9:{code:"SUD",name:"South (Sud)"},10:{code:"SOU",name:"Southwest (Sud-Ouest)"}},CA:{1:{code:"AB",name:"Alberta"},2:{code:"BC",name:"British Columbia"},3:{code:"MB",name:"Manitoba"},4:{code:"NB",name:"New Brunswick"},5:{code:"NL",name:"Newfoundland and Labrador"},6:{code:"NT",name:"Northwest Territories"},7:{code:"NS",name:"Nova Scotia"},8:{code:"NU",name:"Nunavut"},9:{code:"ON",name:"Ontario"},10:{code:"PE",name:"Prince Edward Island"},11:{code:"QC",name:"Québec"},12:{code:"SK",name:"Saskatchewan"},13:{code:"YT",name:"Yukon Territory"}},CV:{1:{code:"BV",name:"Boa Vista"},2:{code:"BR",name:"Brava"},3:{code:"CS",name:"Calheta de Sao Miguel"},4:{code:"MA",name:"Maio"},5:{code:"MO",name:"Mosteiros"},6:{code:"PA",name:"Paul"},7:{code:"PN",name:"Porto Novo"},8:{code:"PR",name:"Praia"},9:{code:"RG",name:"Ribeira Grande"},10:{code:"SL",name:"Sal"},11:{code:"CA",name:"Santa Catarina"},12:{code:"CR",name:"Santa Cruz"},13:{code:"SD",name:"Sao Domingos"},14:{code:"SF",name:"Sao Filipe"},15:{code:"SN",name:"Sao Nicolau"},16:{code:"SV",name:"Sao Vicente"},17:{code:"TA",name:"Tarrafal"}},KY:{1:{code:"CR",name:"Creek"},2:{code:"EA",name:"Eastern"},3:{code:"ML",name:"Midland"},4:{code:"ST",name:"South Town"},5:{code:"SP",name:"Spot Bay"},6:{code:"SK",name:"Stake Bay"},7:{code:"WD",name:"West End"},8:{code:"WN",name:"Western"}},CF:{1:{code:"BAN",name:"Bangui"},2:{code:"BBA",name:"Bamingui-Bangoran"},3:{code:"BKO",name:"Basse-Kotto"},4:{code:"HKO",name:"Haute-Kotto"},5:{code:"HMB",name:"Haut-Mbomou"},6:{code:"KEM",name:"Kemo"},7:{code:"LOB",name:"Lobaye"},8:{code:"MBO",name:"Mbomou"},9:{code:"MKD",name:"Mambéré-Kadéï"},10:{code:"NGR",name:"Nana-Grebizi"},11:{code:"NMM",name:"Nana-Mambere"},12:{code:"OMP",name:"Ombella-M'Poko"},13:{code:"OPE",name:"Ouham-Pende"},14:{code:"OUH",name:"Ouham"},15:{code:"OUK",name:"Ouaka"},16:{code:"SMB",name:"Sangha-Mbaere"},17:{code:"VAK",name:"Vakaga"}},TD:{1:{code:"BA",name:"Batha"},2:{code:"BI",name:"Biltine"},3:{code:"BE",name:"Borkou-Ennedi-Tibesti"},4:{code:"CB",name:"Chari-Baguirmi"},5:{code:"GU",name:"Guera"},6:{code:"KA",name:"Kanem"},7:{code:"LA",name:"Lac"},8:{code:"LC",name:"Logone Occidental"},9:{code:"LR",name:"Logone Oriental"},10:{code:"MK",name:"Mayo-Kebbi"},11:{code:"MC",name:"Moyen-Chari"},12:{code:"OU",name:"Ouaddai"},13:{code:"SA",name:"Salamat"},14:{code:"TA",name:"Tandjile"}},CL:{1:{code:"AI",name:"Aisen del General Carlos Ibanez"},2:{code:"AN",name:"Antofagasta"},3:{code:"AR",name:"Araucania"},4:{code:"AT",name:"Atacama"},5:{code:"BI",name:"Bio-Bio"},6:{code:"CO",name:"Coquimbo"},7:{code:"LI",name:"Libertador General Bernardo O'Hi"},8:{code:"LL",name:"Los Lagos"},9:{code:"MA",name:"Magallanes y de la Antartica Chi"},10:{code:"ML",name:"Maule"},11:{code:"RM",name:"Region 
Metropolitana"},12:{code:"TA",name:"Tarapaca"},13:{code:"VS",name:"Valparaiso"}},CN:{1:{code:"AN",name:"Anhui"},2:{code:"BE",name:"Beijing"},3:{code:"CH",name:"Chongqing"},4:{code:"FU",name:"Fujian"},5:{code:"GA",name:"Gansu"},6:{code:"GU",name:"Guangdong"},7:{code:"GX",name:"Guangxi"},8:{code:"GZ",name:"Guizhou"},9:{code:"HA",name:"Hainan"},10:{code:"HB",name:"Hebei"},11:{code:"HL",name:"Heilongjiang"},12:{code:"HE",name:"Henan"},13:{code:"HK",name:"Hong Kong"},14:{code:"HU",name:"Hubei"},15:{code:"HN",name:"Hunan"},16:{code:"IM",name:"Inner Mongolia"},17:{code:"JI",name:"Jiangsu"},18:{code:"JX",name:"Jiangxi"},19:{code:"JL",name:"Jilin"},20:{code:"LI",name:"Liaoning"},21:{code:"MA",name:"Macau"},22:{code:"NI",name:"Ningxia"},23:{code:"SH",name:"Shaanxi"},24:{code:"SA",name:"Shandong"},25:{code:"SG",name:"Shanghai"},26:{code:"SX",name:"Shanxi"},27:{code:"SI",name:"Sichuan"},28:{code:"TI",name:"Tianjin"},29:{code:"XI",name:"Xinjiang"},30:{code:"YU",name:"Yunnan"},31:{code:"ZH",name:"Zhejiang"}},CC:{1:{code:"D",name:"Direction Island"},2:{code:"H",name:"Home Island"},3:{code:"O",name:"Horsburgh Island"},4:{code:"S",name:"South Island"},5:{code:"W",name:"West Island"}},CO:{1:{code:"AMZ",name:"Amazonas"},2:{code:"ANT",name:"Antioquia"},3:{code:"ARA",name:"Arauca"},4:{code:"ATL",name:"Atlantico"},5:{code:"BDC",name:"Bogota D.C."},6:{code:"BOL",name:"Bolivar"},7:{code:"BOY",name:"Boyaca"},8:{code:"CAL",name:"Caldas"},9:{code:"CAQ",name:"Caqueta"},10:{code:"CAS",name:"Casanare"},11:{code:"CAU",name:"Cauca"},12:{code:"CES",name:"Cesar"},13:{code:"CHO",name:"Choco"},14:{code:"COR",name:"Cordoba"},15:{code:"CAM",name:"Cundinamarca"},16:{code:"GNA",name:"Guainia"},17:{code:"GJR",name:"Guajira"},18:{code:"GVR",name:"Guaviare"},19:{code:"HUI",name:"Huila"},20:{code:"MAG",name:"Magdalena"},21:{code:"MET",name:"Meta"},22:{code:"NAR",name:"Narino"},23:{code:"NDS",name:"Norte de Santander"},24:{code:"PUT",name:"Putumayo"},25:{code:"QUI",name:"Quindio"},26:{code:"RIS",name:"Risaralda"},27:{code:"SAP",name:"San Andres y Providencia"},28:{code:"SAN",name:"Santander"},29:{code:"SUC",name:"Sucre"},30:{code:"TOL",name:"Tolima"},31:{code:"VDC",name:"Valle del Cauca"},32:{code:"VAU",name:"Vaupes"},33:{code:"VIC",name:"Vichada"}},KM:{1:{code:"G",name:"Grande Comore"},2:{code:"A",name:"Anjouan"},3:{code:"M",name:"Moheli"}},CG:{1:{code:"BO",name:"Bouenza"},2:{code:"BR",name:"Brazzaville"},3:{code:"CU",name:"Cuvette"},4:{code:"CO",name:"Cuvette-Ouest"},5:{code:"KO",name:"Kouilou"},6:{code:"LE",name:"Lekoumou"},7:{code:"LI",name:"Likouala"},8:{code:"NI",name:"Niari"},9:{code:"PL",name:"Plateaux"},10:{code:"PO",name:"Pool"},11:{code:"SA",name:"Sangha"}},CK:{1:{code:"AI",name:"Aitutaki"},2:{code:"AT",name:"Atiu"},3:{code:"MA",name:"Manuae"},4:{code:"MG",name:"Mangaia"},5:{code:"MK",name:"Manihiki"},6:{code:"MT",name:"Mitiaro"},7:{code:"MU",name:"Mauke"},8:{code:"NI",name:"Nassau Island"},9:{code:"PA",name:"Palmerston"},10:{code:"PE",name:"Penrhyn"},11:{code:"PU",name:"Pukapuka"},12:{code:"RK",name:"Rakahanga"},13:{code:"RR",name:"Rarotonga"},14:{code:"SU",name:"Surwarrow"},15:{code:"TA",name:"Takutea"}},CR:{1:{code:"AL",name:"Alajuela"},2:{code:"CA",name:"Cartago"},3:{code:"GU",name:"Guanacaste"},4:{code:"HE",name:"Heredia"},5:{code:"LI",name:"Limon"},6:{code:"PU",name:"Puntarenas"},7:{code:"SJ",name:"San 
Jose"}},CI:{1:{code:"ABE",name:"Abengourou"},2:{code:"ABI",name:"Abidjan"},3:{code:"ABO",name:"Aboisso"},4:{code:"ADI",name:"Adiake"},5:{code:"ADZ",name:"Adzope"},6:{code:"AGB",name:"Agboville"},7:{code:"AGN",name:"Agnibilekrou"},8:{code:"ALE",name:"Alepe"},9:{code:"BOC",name:"Bocanda"},10:{code:"BAN",name:"Bangolo"},11:{code:"BEO",name:"Beoumi"},12:{code:"BIA",name:"Biankouma"},13:{code:"BDK",name:"Bondoukou"},14:{code:"BGN",name:"Bongouanou"},15:{code:"BFL",name:"Bouafle"},16:{code:"BKE",name:"Bouake"},17:{code:"BNA",name:"Bouna"},18:{code:"BDL",name:"Boundiali"},19:{code:"DKL",name:"Dabakala"},20:{code:"DBU",name:"Dabou"},21:{code:"DAL",name:"Daloa"},22:{code:"DAN",name:"Danane"},23:{code:"DAO",name:"Daoukro"},24:{code:"DIM",name:"Dimbokro"},25:{code:"DIV",name:"Divo"},26:{code:"DUE",name:"Duekoue"},27:{code:"FER",name:"Ferkessedougou"},28:{code:"GAG",name:"Gagnoa"},29:{code:"GBA",name:"Grand-Bassam"},30:{code:"GLA",name:"Grand-Lahou"},31:{code:"GUI",name:"Guiglo"},32:{code:"ISS",name:"Issia"},33:{code:"JAC",name:"Jacqueville"},34:{code:"KAT",name:"Katiola"},35:{code:"KOR",name:"Korhogo"},36:{code:"LAK",name:"Lakota"},37:{code:"MAN",name:"Man"},38:{code:"MKN",name:"Mankono"},39:{code:"MBA",name:"Mbahiakro"},40:{code:"ODI",name:"Odienne"},41:{code:"OUM",name:"Oume"},42:{code:"SAK",name:"Sakassou"},43:{code:"SPE",name:"San-Pedro"},44:{code:"SAS",name:"Sassandra"},45:{code:"SEG",name:"Seguela"},46:{code:"SIN",name:"Sinfra"},47:{code:"SOU",name:"Soubre"},48:{code:"TAB",name:"Tabou"},49:{code:"TAN",name:"Tanda"},50:{code:"TIE",name:"Tiebissou"},51:{code:"TIN",name:"Tingrela"},52:{code:"TIA",name:"Tiassale"},53:{code:"TBA",name:"Touba"},54:{code:"TLP",name:"Toulepleu"},55:{code:"TMD",name:"Toumodi"},56:{code:"VAV",name:"Vavoua"},57:{code:"YAM",name:"Yamoussoukro"},58:{code:"ZUE",name:"Zuenoula"}},HR:{1:{code:"BB",name:"Bjelovar-Bilogora"},2:{code:"CZ",name:"City of Zagreb"},3:{code:"DN",name:"Dubrovnik-Neretva"},4:{code:"IS",name:"Istra"},5:{code:"KA",name:"Karlovac"},6:{code:"KK",name:"Koprivnica-Krizevci"},7:{code:"KZ",name:"Krapina-Zagorje"},8:{code:"LS",name:"Lika-Senj"},9:{code:"ME",name:"Medimurje"},10:{code:"OB",name:"Osijek-Baranja"},11:{code:"PS",name:"Pozega-Slavonia"},12:{code:"PG",name:"Primorje-Gorski Kotar"},13:{code:"SI",name:"Sibenik"},14:{code:"SM",name:"Sisak-Moslavina"},15:{code:"SB",name:"Slavonski Brod-Posavina"},16:{code:"SD",name:"Split-Dalmatia"},17:{code:"VA",name:"Varazdin"},18:{code:"VP",name:"Virovitica-Podravina"},19:{code:"VS",name:"Vukovar-Srijem"},20:{code:"ZK",name:"Zadar-Knin"},21:{code:"ZA",name:"Zagreb"}},CU:{1:{code:"CA",name:"Camaguey"},2:{code:"CD",name:"Ciego de Avila"},3:{code:"CI",name:"Cienfuegos"},4:{code:"CH",name:"Ciudad de La Habana"},5:{code:"GR",name:"Granma"},6:{code:"GU",name:"Guantanamo"},7:{code:"HO",name:"Holguin"},8:{code:"IJ",name:"Isla de la Juventud"},9:{code:"LH",name:"La Habana"},10:{code:"LT",name:"Las Tunas"},11:{code:"MA",name:"Matanzas"},12:{code:"PR",name:"Pinar del Rio"},13:{code:"SS",name:"Sancti Spiritus"},14:{code:"SC",name:"Santiago de Cuba"},15:{code:"VC",name:"Villa Clara"}},CY:{1:{code:"F",name:"Famagusta"},2:{code:"K",name:"Kyrenia"},3:{code:"A",name:"Larnaca"},4:{code:"I",name:"Limassol"},5:{code:"N",name:"Nicosia"},6:{code:"P",name:"Paphos"}},CZ:{1:{code:"A",name:"Hlavní město 
Praha"},2:{code:"B",name:"Jihomoravský"},3:{code:"C",name:"Jihočeský"},4:{code:"E",name:"Pardubický"},5:{code:"H",name:"Královéhradecký"},6:{code:"J",name:"Vysočina"},7:{code:"K",name:"Karlovarský"},8:{code:"L",name:"Liberecký"},9:{code:"M",name:"Olomoucký"},10:{code:"P",name:"Plzeňský"},11:{code:"S",name:"Středočeský"},12:{code:"T",name:"Moravskoslezský"},13:{code:"U",name:"Ústecký"},14:{code:"Z",name:"Zlínský"}},DK:{1:{code:"AR",name:"Arhus"},2:{code:"BH",name:"Bornholm"},3:{code:"CO",name:"Copenhagen"},4:{code:"FO",name:"Faroe Islands"},5:{code:"FR",name:"Frederiksborg"},6:{code:"FY",name:"Fyn"},7:{code:"KO",name:"Kobenhavn"},8:{code:"NO",name:"Nordjylland"},9:{code:"RI",name:"Ribe"},10:{code:"RK",name:"Ringkobing"},11:{code:"RO",name:"Roskilde"},12:{code:"SO",name:"Sonderjylland"},13:{code:"ST",name:"Storstrom"},14:{code:"VK",name:"Vejle"},15:{code:"VJ",name:"Vestjælland"},16:{code:"VB",name:"Viborg"}},DJ:{1:{code:"S",name:"'Ali Sabih"},2:{code:"K",name:"Dikhil"},3:{code:"J",name:"Djibouti"},4:{code:"O",name:"Obock"},5:{code:"T",name:"Tadjoura"}},DM:{1:{code:"AND",name:"Saint Andrew Parish"},2:{code:"DAV",name:"Saint David Parish"},3:{code:"GEO",name:"Saint George Parish"},4:{code:"JOH",name:"Saint John Parish"},5:{code:"JOS",name:"Saint Joseph Parish"},6:{code:"LUK",name:"Saint Luke Parish"},7:{code:"MAR",name:"Saint Mark Parish"},8:{code:"PAT",name:"Saint Patrick Parish"},9:{code:"PAU",name:"Saint Paul Parish"},10:{code:"PET",name:"Saint Peter Parish"}},DO:{1:{code:"DN",name:"Distrito Nacional"},2:{code:"AZ",name:"Azua"},3:{code:"BC",name:"Baoruco"},4:{code:"BH",name:"Barahona"},5:{code:"DJ",name:"Dajabon"},6:{code:"DU",name:"Duarte"},7:{code:"EL",name:"Elias Pina"},8:{code:"SY",name:"El Seybo"},9:{code:"ET",name:"Espaillat"},10:{code:"HM",name:"Hato Mayor"},11:{code:"IN",name:"Independencia"},12:{code:"AL",name:"La Altagracia"},13:{code:"RO",name:"La Romana"},14:{code:"VE",name:"La Vega"},15:{code:"MT",name:"Maria Trinidad Sanchez"},16:{code:"MN",name:"Monsenor Nouel"},17:{code:"MC",name:"Monte Cristi"},18:{code:"MP",name:"Monte Plata"},19:{code:"PD",name:"Pedernales"},20:{code:"PR",name:"Peravia (Bani)"},21:{code:"PP",name:"Puerto Plata"},22:{code:"SL",name:"Salcedo"},23:{code:"SM",name:"Samana"},24:{code:"SH",name:"Sanchez Ramirez"},25:{code:"SC",name:"San Cristobal"},26:{code:"JO",name:"San Jose de Ocoa"},27:{code:"SJ",name:"San Juan"},28:{code:"PM",name:"San Pedro de Macoris"},29:{code:"SA",name:"Santiago"},30:{code:"ST",name:"Santiago Rodriguez"},31:{code:"SD",name:"Santo Domingo"},32:{code:"VA",name:"Valverde"}},TP:{1:{code:"AL",name:"Aileu"},2:{code:"AN",name:"Ainaro"},3:{code:"BA",name:"Baucau"},4:{code:"BO",name:"Bobonaro"},5:{code:"CO",name:"Cova Lima"},6:{code:"DI",name:"Dili"},7:{code:"ER",name:"Ermera"},8:{code:"LA",name:"Lautem"},9:{code:"LI",name:"Liquica"},10:{code:"MT",name:"Manatuto"},11:{code:"MF",name:"Manufahi"},12:{code:"OE",name:"Oecussi"},13:{code:"VI",name:"Viqueque"}},EC:{1:{code:"AZU",name:"Azuay"},2:{code:"BOL",name:"Bolivar"},3:{code:"CAN",name:"Cañar"},4:{code:"CAR",name:"Carchi"},5:{code:"CHI",name:"Chimborazo"},6:{code:"COT",name:"Cotopaxi"},7:{code:"EOR",name:"El Oro"},8:{code:"ESM",name:"Esmeraldas"},9:{code:"GPS",name:"Galápagos"},10:{code:"GUA",name:"Guayas"},11:{code:"IMB",name:"Imbabura"},12:{code:"LOJ",name:"Loja"},13:{code:"LRO",name:"Los Ríos"},14:{code:"MAN",name:"Manabí"},15:{code:"MSA",name:"Morona 
Santiago"},16:{code:"NAP",name:"Napo"},17:{code:"ORE",name:"Orellana"},18:{code:"PAS",name:"Pastaza"},19:{code:"PIC",name:"Pichincha"},20:{code:"SUC",name:"Sucumbíos"},21:{code:"TUN",name:"Tungurahua"},22:{code:"ZCH",name:"Zamora Chinchipe"}},EG:{1:{code:"DHY",name:"Ad Daqahliyah"},2:{code:"BAM",name:"Al Bahr al Ahmar"},3:{code:"BHY",name:"Al Buhayrah"},4:{code:"FYM",name:"Al Fayyum"},5:{code:"GBY",name:"Al Gharbiyah"},6:{code:"IDR",name:"Al Iskandariyah"},7:{code:"IML",name:"Al Isma 'iliyah"},8:{code:"JZH",name:"Al Jizah"},9:{code:"MFY",name:"Al Minufiyah"},10:{code:"MNY",name:"Al Minya"},11:{code:"QHR",name:"Al Qahirah"},12:{code:"QLY",name:"Al Qalyubiyah"},13:{code:"WJD",name:"Al Wadi al Jadid"},14:{code:"SHQ",name:"Ash Sharqiyah"},15:{code:"SWY",name:"As Suways"},16:{code:"ASW",name:"Aswan"},17:{code:"ASY",name:"Asyut"},18:{code:"BSW",name:"Bani Suwayf"},19:{code:"BSD",name:"Bur Sa'id"},20:{code:"DMY",name:"Dumyat"},21:{code:"JNS",name:"Janub Sina'"},22:{code:"KSH",name:"Kafr ash Shaykh"},23:{code:"MAT",name:"Matruh"},24:{code:"QIN",name:"Qina"},25:{code:"SHS",name:"Shamal Sina'"},26:{code:"SUH",name:"Suhaj"}},SV:{1:{code:"AH",name:"Ahuachapan"},2:{code:"CA",name:"Cabanas"},3:{code:"CH",name:"Chalatenango"},4:{code:"CU",name:"Cuscatlan"},5:{code:"LB",name:"La Libertad"},6:{code:"PZ",name:"La Paz"},7:{code:"UN",name:"La Union"},8:{code:"MO",name:"Morazan"},9:{code:"SM",name:"San Miguel"},10:{code:"SS",name:"San Salvador"},11:{code:"SV",name:"San Vicente"},12:{code:"SA",name:"Santa Ana"},13:{code:"SO",name:"Sonsonate"},14:{code:"US",name:"Usulutan"}},GQ:{1:{code:"AN",name:"Provincia Annobon"},2:{code:"BN",name:"Provincia Bioko Norte"},3:{code:"BS",name:"Provincia Bioko Sur"},4:{code:"CS",name:"Provincia Centro Sur"},5:{code:"KN",name:"Provincia Kie-Ntem"},6:{code:"LI",name:"Provincia Litoral"},7:{code:"WN",name:"Provincia Wele-Nzas"}},ER:{1:{code:"MA",name:"Central (Maekel)"},2:{code:"KE",name:"Anseba (Keren)"},3:{code:"DK",name:"Southern Red Sea (Debub-Keih-Bah)"},4:{code:"SK",name:"Northern Red Sea (Semien-Keih-Ba)"},5:{code:"DE",name:"Southern (Debub)"},6:{code:"BR",name:"Gash-Barka (Barentu)"}},EE:{1:{code:"HA",name:"Harjumaa (Tallinn)"},2:{code:"HI",name:"Hiiumaa (Kardla)"},3:{code:"IV",name:"Ida-Virumaa (Johvi)"},4:{code:"JA",name:"Jarvamaa (Paide)"},5:{code:"JO",name:"Jogevamaa (Jogeva)"},6:{code:"LV",name:"Laane-Virumaa (Rakvere)"},7:{code:"LA",name:"Laanemaa (Haapsalu)"},8:{code:"PA",name:"Parnumaa (Parnu)"},9:{code:"PO",name:"Polvamaa (Polva)"},10:{code:"RA",name:"Raplamaa (Rapla)"},11:{code:"SA",name:"Saaremaa (Kuessaare)"},12:{code:"TA",name:"Tartumaa (Tartu)"},13:{code:"VA",name:"Valgamaa (Valga)"},14:{code:"VI",name:"Viljandimaa (Viljandi)"},15:{code:"VO",name:"Vorumaa (Voru)"}},ET:{1:{code:"AF",name:"Afar"},2:{code:"AH",name:"Amhara"},3:{code:"BG",name:"Benishangul-Gumaz"},4:{code:"GB",name:"Gambela"},5:{code:"HR",name:"Hariai"},6:{code:"OR",name:"Oromia"},7:{code:"SM",name:"Somali"},8:{code:"SN",name:"Southern Nations - Nationalities"},9:{code:"TG",name:"Tigray"},10:{code:"AA",name:"Addis Ababa"},11:{code:"DD",name:"Dire Dawa"}},FO:{1:{code:"TÛR",name:"Tûrshavnar Kommuna"},2:{code:"KLA",name:"Klaksvík"},3:{code:"RUN",name:"Runavík"},4:{code:"TVØ",name:"Tvøroyri"},5:{code:"FUG",name:"Fuglafjørður"},6:{code:"SUN",name:"Sunda Kommuna"},7:{code:"VáG",name:"Vágur"},8:{code:"NES",name:"Nes"},9:{code:"VES",name:"Vestmanna"},10:{code:"MIð",name:"Miðvágur"},11:{code:"SØR",name:"Sørvágur"},12:{code:"GØT",name:"Gøtu Kommuna"},13:{code:"SJû",name:"Sjûvar 
Kommuna"},14:{code:"LEI",name:"Leirvík"},15:{code:"SAN",name:"Sandavágur"},16:{code:"HVA",name:"Hvalba"},17:{code:"EIð",name:"Eiði"},18:{code:"KVí",name:"Kvívík"},19:{code:"SAN",name:"Sandur"},20:{code:"SKO",name:"Skopun"},21:{code:"HVA",name:"Hvannasund"},22:{code:"SUM",name:"Sumba"},23:{code:"VIð",name:"Viðareiði"},24:{code:"POR",name:"Porkeri"},25:{code:"SKá",name:"Skálavík"},26:{code:"KUN",name:"Kunoy"},27:{code:"HÚS",name:"HÚsavík"},28:{code:"HOV",name:"Hov"},29:{code:"FáM",name:"Fámjin"},30:{code:"FUN",name:"Funningur"},31:{code:"HÚS",name:"HÚsar"},32:{code:"SKÚ",name:"SkÚvoy"},33:{code:"SVí",name:"Svínoy"},34:{code:"FUG",name:"Fugloy"}},FJ:{1:{code:"C",name:"Central Division"},2:{code:"E",name:"Eastern Division"},3:{code:"N",name:"Northern Division"},4:{code:"R",name:"Rotuma"},5:{code:"W",name:"Western Division"}},FI:{1:{code:"AL",name:"Ahvenanmaan Laani"},2:{code:"ES",name:"Etela-Suomen Laani"},3:{code:"IS",name:"Ita-Suomen Laani"},4:{code:"LS",name:"Lansi-Suomen Laani"},5:{code:"LA",name:"Lapin Lanani"},6:{code:"OU",name:"Oulun Laani"}},FR:{1:{code:"AL",name:"Alsace"},2:{code:"AQ",name:"Aquitaine"},3:{code:"AU",name:"Auvergne"},4:{code:"BR",name:"Brittany"},5:{code:"BU",name:"Burgundy"},6:{code:"CE",name:"Center Loire Valley"},7:{code:"CH",name:"Champagne"},8:{code:"CO",name:"Corse"},9:{code:"FR",name:"France Comte"},10:{code:"LA",name:"Languedoc Roussillon"},11:{code:"LI",name:"Limousin"},12:{code:"LO",name:"Lorraine"},13:{code:"MI",name:"Midi Pyrenees"},14:{code:"NO",name:"Nord Pas de Calais"},15:{code:"NR",name:"Normandy"},16:{code:"PA",name:"Paris / Ile de France"},17:{code:"PI",name:"Picardie"},18:{code:"PO",name:"Poitou Charente"},19:{code:"PR",name:"Provence"},20:{code:"RH",name:"Rhone Alps"},21:{code:"RI",name:"Riviera"},22:{code:"WE",name:"Western Loire Valley"}},FX:{1:{code:"Et",name:"Etranger"},2:{code:"01",name:"Ain"},3:{code:"02",name:"Aisne"},4:{code:"03",name:"Allier"},5:{code:"04",name:"Alpes de Haute Provence"},6:{code:"05",name:"Hautes-Alpes"},7:{code:"06",name:"Alpes Maritimes"},8:{code:"07",name:"Ardèche"},9:{code:"08",name:"Ardennes"},10:{code:"09",name:"Ariège"},11:{code:"10",name:"Aube"},12:{code:"11",name:"Aude"},13:{code:"12",name:"Aveyron"},14:{code:"13",name:"Bouches du Rhône"},15:{code:"14",name:"Calvados"},16:{code:"15",name:"Cantal"},17:{code:"16",name:"Charente"},18:{code:"17",name:"Charente Maritime"},19:{code:"18",name:"Cher"},20:{code:"19",name:"Corrèze"},21:{code:"2A",name:"Corse du Sud"},22:{code:"2B",name:"Haute Corse"},23:{code:"21",name:"Côte d'or"},24:{code:"22",name:"Côtes d'Armor"},25:{code:"23",name:"Creuse"},26:{code:"24",name:"Dordogne"},27:{code:"25",name:"Doubs"},28:{code:"26",name:"Drôme"},29:{code:"27",name:"Eure"},30:{code:"28",name:"Eure et Loir"},31:{code:"29",name:"Finistère"},32:{code:"30",name:"Gard"},33:{code:"31",name:"Haute Garonne"},34:{code:"32",name:"Gers"},35:{code:"33",name:"Gironde"},36:{code:"34",name:"Hérault"},37:{code:"35",name:"Ille et Vilaine"},38:{code:"36",name:"Indre"},39:{code:"37",name:"Indre et Loire"},40:{code:"38",name:"Isére"},41:{code:"39",name:"Jura"},42:{code:"40",name:"Landes"},43:{code:"41",name:"Loir et Cher"},44:{code:"42",name:"Loire"},45:{code:"43",name:"Haute Loire"},46:{code:"44",name:"Loire Atlantique"},47:{code:"45",name:"Loiret"},48:{code:"46",name:"Lot"},49:{code:"47",name:"Lot et Garonne"},50:{code:"48",name:"Lozère"},51:{code:"49",name:"Maine et Loire"},52:{code:"50",name:"Manche"},53:{code:"51",name:"Marne"},54:{code:"52",name:"Haute 
Marne"},55:{code:"53",name:"Mayenne"},56:{code:"54",name:"Meurthe et Moselle"},57:{code:"55",name:"Meuse"},58:{code:"56",name:"Morbihan"},59:{code:"57",name:"Moselle"},60:{code:"58",name:"Nièvre"},61:{code:"59",name:"Nord"},62:{code:"60",name:"Oise"},63:{code:"61",name:"Orne"},64:{code:"62",name:"Pas de Calais"},65:{code:"63",name:"Puy de Dôme"},66:{code:"64",name:"Pyrenees Atlantique"},67:{code:"65",name:"Hautes Pyrenees"},68:{code:"66",name:"Pyrenees Orientale"},69:{code:"67",name:"Bas Rhin"},70:{code:"68",name:"Haut Rhin"},71:{code:"69",name:"Rhône"},72:{code:"70",name:"Haute Saône"},73:{code:"71",name:"Saône et Loire"},74:{code:"72",name:"Sarthe"},75:{code:"73",name:"Savoie"},76:{code:"74",name:"Haute Savoie"},77:{code:"75",name:"Paris"},78:{code:"76",name:"Seine Martitime"},79:{code:"77",name:"Seine et Marne"},80:{code:"78",name:"Yvelines"},81:{code:"79",name:"Deux Sèvres"},82:{code:"80",name:"Somme"},83:{code:"81",name:"Tarn"},84:{code:"82",name:"Tarn et Garonne"},85:{code:"83",name:"Var"},86:{code:"84",name:"Vaucluse"},87:{code:"85",name:"Vendée"},88:{code:"86",name:"Vienne"},89:{code:"87",name:"Haute Vienne"},90:{code:"88",name:"Vosges"},91:{code:"89",name:"Yonne"},92:{code:"90",name:"Territoire de Belfort"},93:{code:"91",name:"Essonne"},94:{code:"92",name:"Hauts de Seine"},95:{code:"93",name:"Seine St-Denis"},96:{code:"94",name:"Val de Marne"},97:{code:"95",name:"Val d'oise"}},GF:{1:{code:"AWA",name:"Awala-Yalimapo"},2:{code:"MAN",name:"Mana"},3:{code:"SAI",name:"Saint-Laurent-Du-Maroni"},4:{code:"APA",name:"Apatou"},5:{code:"GRA",name:"Grand-Santi"},6:{code:"PAP",name:"Papaïchton"},7:{code:"SAÜ",name:"SaÜl"},8:{code:"MAR",name:"Maripasoula"},9:{code:"CAM",name:"Camopi"},10:{code:"SAI",name:"Saint-Georges"},11:{code:"OUA",name:"Ouanary"},12:{code:"RéG",name:"Régina"},13:{code:"ROU",name:"Roura"},14:{code:"SAI",name:"Saint-élie"},15:{code:"IRA",name:"Iracoubo"},16:{code:"SIN",name:"Sinnamary"},17:{code:"KOU",name:"Kourou"},18:{code:"MAC",name:"Macouria"},19:{code:"MON",name:"Montsinéry-Tonnegrande"},20:{code:"MAT",name:"Matoury"},21:{code:"CAY",name:"Cayenne"},22:{code:"REM",name:"Remire-Montjoly"}},PF:{1:{code:"M",name:"Archipel des Marquises"},2:{code:"T",name:"Archipel des Tuamotu"},3:{code:"I",name:"Archipel des Tubuai"},4:{code:"V",name:"Iles du Vent"},5:{code:"S",name:"Iles Sous-le-Vent"}},TF:{1:{code:"C",name:"Iles Crozet"},2:{code:"K",name:"Iles Kerguelen"},3:{code:"A",name:"Ile Amsterdam"},4:{code:"P",name:"Ile Saint-Paul"},5:{code:"D",name:"Adelie Land"}},GA:{1:{code:"ES",name:"Estuaire"},2:{code:"HO",name:"Haut-Ogooue"},3:{code:"MO",name:"Moyen-Ogooue"},4:{code:"NG",name:"Ngounie"},5:{code:"NY",name:"Nyanga"},6:{code:"OI",name:"Ogooue-Ivindo"},7:{code:"OL",name:"Ogooue-Lolo"},8:{code:"OM",name:"Ogooue-Maritime"},9:{code:"WN",name:"Woleu-Ntem"}},GM:{1:{code:"BJ",name:"Banjul"},2:{code:"BS",name:"Basse"},3:{code:"BR",name:"Brikama"},4:{code:"JA",name:"Janjangbure"},5:{code:"KA",name:"Kanifeng"},6:{code:"KE",name:"Kerewan"},7:{code:"KU",name:"Kuntaur"},8:{code:"MA",name:"Mansakonko"},9:{code:"LR",name:"Lower River"},10:{code:"CR",name:"Central River"},11:{code:"NB",name:"North Bank"},12:{code:"UR",name:"Upper River"},13:{code:"WE",name:"Western"}},GE:{1:{code:"AB",name:"Abkhazia"},2:{code:"AJ",name:"Ajaria"},3:{code:"GU",name:"Guria"},4:{code:"IM",name:"Imereti"},5:{code:"KA",name:"Kakheti"},6:{code:"KK",name:"Kvemo Kartli"},7:{code:"MM",name:"Mtskheta-Mtianeti"},8:{code:"RL",name:"Racha Lechkhumi and Kvemo 
Svanet"},9:{code:"SJ",name:"Samtskhe-Javakheti"},10:{code:"SK",name:"Shida Kartli"},11:{code:"SZ",name:"Samegrelo-Zemo Svaneti"},12:{code:"TB",name:"Tbilisi"}},DE:{1:{code:"BAW",name:"Baden-Württemberg"},2:{code:"BAY",name:"Bayern"},3:{code:"BER",name:"Berlin"},4:{code:"BRG",name:"Brandenburg"},5:{code:"BRE",name:"Bremen"},6:{code:"HAM",name:"Hamburg"},7:{code:"HES",name:"Hessen"},8:{code:"MEC",name:"Mecklenburg-Vorpommern"},9:{code:"NDS",name:"Niedersachsen"},10:{code:"NRW",name:"Nordrhein-Westfalen"},11:{code:"RHE",name:"Rheinland-Pfalz"},12:{code:"SAR",name:"Saarland"},13:{code:"SAS",name:"Sachsen"},14:{code:"SAC",name:"Sachsen-Anhalt"},15:{code:"SCN",name:"Schleswig-Holstein"},16:{code:"THE",name:"Thüringen"}},GH:{1:{code:"AS",name:"Ashanti Region"},2:{code:"BA",name:"Brong-Ahafo Region"},3:{code:"CE",name:"Central Region"},4:{code:"EA",name:"Eastern Region"},5:{code:"GA",name:"Greater Accra Region"},6:{code:"NO",name:"Northern Region"},7:{code:"UE",name:"Upper East Region"},8:{code:"UW",name:"Upper West Region"},9:{code:"VO",name:"Volta Region"},10:{code:"WE",name:"Western Region"}},GI:{1:{code:"EAS",name:"East Side"},2:{code:"NOR",name:"North District"},3:{code:"REC",name:"Reclamation Areas"},4:{code:"SAN",name:"Sandpits Area"},5:{code:"SOU",name:"South District"},6:{code:"TOW",name:"Town Area"},7:{code:"UPP",name:"Upper Town"},8:{code:"OTH",name:"Other"}},GR:{1:{code:"AT",name:"Attica"},2:{code:"CN",name:"Central Greece"},3:{code:"CM",name:"Central Macedonia"},4:{code:"CR",name:"Crete"},5:{code:"EM",name:"East Macedonia and Thrace"},6:{code:"EP",name:"Epirus"},7:{code:"II",name:"Ionian Islands"},8:{code:"NA",name:"North Aegean"},9:{code:"PP",name:"Peloponnesos"},10:{code:"SA",name:"South Aegean"},11:{code:"TH",name:"Thessaly"},12:{code:"WG",name:"West Greece"},13:{code:"WM",name:"West Macedonia"}},GL:{1:{code:"A",name:"Avannaa"},2:{code:"T",name:"Tunu"},3:{code:"K",name:"Kitaa"}},86:{1:{code:"A",name:"Saint Andrew"},2:{code:"D",name:"Saint David"},3:{code:"G",name:"Saint George"},4:{code:"J",name:"Saint John"},5:{code:"M",name:"Saint Mark"},6:{code:"P",name:"Saint Patrick"},7:{code:"C",name:"Carriacou"},8:{code:"Q",name:"Petit Martinique"}},GP:{1:{code:"ARR",name:"Arrondissements Of The Guadeloup"},2:{code:"CAN",name:"Cantons Of The Guadeloup Depart"},3:{code:"COM",name:"Communes Of The Guadeloup Depart"}},GU:{1:{code:"AGA",name:"Agana Heights"},2:{code:"AGA",name:"Agat"},3:{code:"ASA",name:"Asan Maina"},4:{code:"BAR",name:"Barrigada"},5:{code:"CHA",name:"Chalan Pago Ordot"},6:{code:"DED",name:"Dededo"},7:{code:"HAG",name:"HagÅtña"},8:{code:"INA",name:"Inarajan"},9:{code:"MAN",name:"Mangilao"},10:{code:"MER",name:"Merizo"},11:{code:"MON",name:"Mongmong Toto Maite"},12:{code:"PIT",name:"Piti"},13:{code:"SAN",name:"Santa Rita"},14:{code:"SIN",name:"Sinajana"},15:{code:"TAL",name:"Talofofo"},16:{code:"TAM",name:"Tamuning"},17:{code:"UMA",name:"Umatac"},18:{code:"YIG",name:"Yigo"},19:{code:"YON",name:"Yona"}},GT:{1:{code:"AV",name:"Alta Verapaz"},2:{code:"BV",name:"Baja Verapaz"},3:{code:"CM",name:"Chimaltenango"},4:{code:"CQ",name:"Chiquimula"},5:{code:"PE",name:"El Peten"},6:{code:"PR",name:"El Progreso"},7:{code:"QC",name:"El Quiche"},8:{code:"ES",name:"Escuintla"},9:{code:"GU",name:"Guatemala"},10:{code:"HU",name:"Huehuetenango"},11:{code:"IZ",name:"Izabal"},12:{code:"JA",name:"Jalapa"},13:{code:"JU",name:"Jutiapa"},14:{code:"QZ",name:"Quetzaltenango"},15:{code:"RE",name:"Retalhuleu"},16:{code:"ST",name:"Sacatepequez"},17:{code:"SM",name:"San Marcos"},18:{code:"SR",name:"Santa 
Rosa"},19:{code:"SO",name:"Solola"},20:{code:"SU",name:"Suchitepequez"},21:{code:"TO",name:"Totonicapan"},22:{code:"ZA",name:"Zacapa"}},GN:{1:{code:"CNK",name:"Conakry"},2:{code:"BYL",name:"Beyla"},3:{code:"BFA",name:"Boffa"},4:{code:"BOK",name:"Boke"},5:{code:"COY",name:"Coyah"},6:{code:"DBL",name:"Dabola"},7:{code:"DLB",name:"Dalaba"},8:{code:"DGR",name:"Dinguiraye"},9:{code:"DBR",name:"Dubreka"},10:{code:"FRN",name:"Faranah"},11:{code:"FRC",name:"Forecariah"},12:{code:"FRI",name:"Fria"},13:{code:"GAO",name:"Gaoual"},14:{code:"GCD",name:"Gueckedou"},15:{code:"KNK",name:"Kankan"},16:{code:"KRN",name:"Kerouane"},17:{code:"KND",name:"Kindia"},18:{code:"KSD",name:"Kissidougou"},19:{code:"KBA",name:"Koubia"},20:{code:"KDA",name:"Koundara"},21:{code:"KRA",name:"Kouroussa"},22:{code:"LAB",name:"Labe"},23:{code:"LLM",name:"Lelouma"},24:{code:"LOL",name:"Lola"},25:{code:"MCT",name:"Macenta"},26:{code:"MAL",name:"Mali"},27:{code:"MAM",name:"Mamou"},28:{code:"MAN",name:"Mandiana"},29:{code:"NZR",name:"Nzerekore"},30:{code:"PIT",name:"Pita"},31:{code:"SIG",name:"Siguiri"},32:{code:"TLM",name:"Telimele"},33:{code:"TOG",name:"Tougue"},34:{code:"YOM",name:"Yomou"}},GW:{1:{code:"BF",name:"Bafata Region"},2:{code:"BB",name:"Biombo Region"},3:{code:"BS",name:"Bissau Region"},4:{code:"BL",name:"Bolama Region"},5:{code:"CA",name:"Cacheu Region"},6:{code:"GA",name:"Gabu Region"},7:{code:"OI",name:"Oio Region"},8:{code:"QU",name:"Quinara Region"},9:{code:"TO",name:"Tombali Region"}},GY:{1:{code:"BW",name:"Barima-Waini"},2:{code:"CM",name:"Cuyuni-Mazaruni"},3:{code:"DM",name:"Demerara-Mahaica"},4:{code:"EC",name:"East Berbice-Corentyne"},5:{code:"EW",name:"Essequibo Islands-West Demerara"},6:{code:"MB",name:"Mahaica-Berbice"},7:{code:"PM",name:"Pomeroon-Supenaam"},8:{code:"PI",name:"Potaro-Siparuni"},9:{code:"UD",name:"Upper Demerara-Berbice"},10:{code:"UT",name:"Upper Takutu-Upper Essequibo"}},HT:{1:{code:"AR",name:"Artibonite"},2:{code:"CE",name:"Centre"},3:{code:"GA",name:"Grand'Anse"},4:{code:"ND",name:"Nord"},5:{code:"NE",name:"Nord-Est"},6:{code:"NO",name:"Nord-Ouest"},7:{code:"OU",name:"Ouest"},8:{code:"SD",name:"Sud"},9:{code:"SE",name:"Sud-Est"}},HM:{1:{code:"F",name:"Flat Island"},2:{code:"M",name:"McDonald Island"},3:{code:"S",name:"Shag Island"},4:{code:"H",name:"Heard Island"}},HN:{1:{code:"AT",name:"Atlantida"},2:{code:"CH",name:"Choluteca"},3:{code:"CL",name:"Colon"},4:{code:"CM",name:"Comayagua"},5:{code:"CP",name:"Copan"},6:{code:"CR",name:"Cortes"},7:{code:"PA",name:"El Paraiso"},8:{code:"FM",name:"Francisco Morazan"},9:{code:"GD",name:"Gracias a Dios"},10:{code:"IN",name:"Intibuca"},11:{code:"IB",name:"Islas de la Bahia (Bay Islands)"},12:{code:"PZ",name:"La Paz"},13:{code:"LE",name:"Lempira"},14:{code:"OC",name:"Ocotepeque"},15:{code:"OL",name:"Olancho"},16:{code:"SB",name:"Santa Barbara"},17:{code:"VA",name:"Valle"},18:{code:"YO",name:"Yoro"}},HK:{1:{code:"HCW",name:"Central and Western Hong Kong Is"},2:{code:"HEA",name:"Eastern Hong Kong Island"},3:{code:"HSO",name:"Southern Hong Kong Island"},4:{code:"HWC",name:"Wan Chai Hong Kong Island"},5:{code:"KKC",name:"Kowloon City Kowloon"},6:{code:"KKT",name:"Kwun Tong Kowloon"},7:{code:"KSS",name:"Sham Shui Po Kowloon"},8:{code:"KWT",name:"Wong Tai Sin Kowloon"},9:{code:"KYT",name:"Yau Tsim Mong Kowloon"},10:{code:"NIS",name:"Islands New Territories"},11:{code:"NKT",name:"Kwai Tsing New Territories"},12:{code:"NNO",name:"North New Territories"},13:{code:"NSK",name:"Sai Kung New Territories"},14:{code:"NST",name:"Sha Tin New 
Territories"},15:{code:"NTP",name:"Tai Po New Territories"},16:{code:"NTW",name:"Tsuen Wan New Territories"},17:{code:"NTM",name:"Tuen Mun New Territories"},18:{code:"NYL",name:"Yuen Long New Territories"}},HU:{1:{code:"BK",name:"Bacs-Kiskun"},2:{code:"BA",name:"Baranya"},3:{code:"BE",name:"Bekes"},4:{code:"BS",name:"Bekescsaba"},5:{code:"BZ",name:"Borsod-Abauj-Zemplen"},6:{code:"BU",name:"Budapest"},7:{code:"CS",name:"Csongrad"},8:{code:"DE",name:"Debrecen"},9:{code:"DU",name:"Dunaujvaros"},10:{code:"EG",name:"Eger"},11:{code:"FE",name:"Fejer"},12:{code:"GY",name:"Gyor"},13:{code:"GM",name:"Gyor-Moson-Sopron"},14:{code:"HB",name:"Hajdu-Bihar"},15:{code:"HE",name:"Heves"},16:{code:"HO",name:"Hodmezovasarhely"},17:{code:"JN",name:"Jasz-Nagykun-Szolnok"},18:{code:"KA",name:"Kaposvar"},19:{code:"KE",name:"Kecskemet"},20:{code:"KO",name:"Komarom-Esztergom"},21:{code:"MI",name:"Miskolc"},22:{code:"NA",name:"Nagykanizsa"},23:{code:"NO",name:"Nograd"},24:{code:"NY",name:"Nyiregyhaza"},25:{code:"PE",name:"Pecs"},26:{code:"PS",name:"Pest"},27:{code:"SO",name:"Somogy"},28:{code:"SP",name:"Sopron"},29:{code:"SS",name:"Szabolcs-Szatmar-Bereg"},30:{code:"SZ",name:"Szeged"},31:{code:"SE",name:"Szekesfehervar"},32:{code:"SL",name:"Szolnok"},33:{code:"SM",name:"Szombathely"},34:{code:"TA",name:"Tatabanya"},35:{code:"TO",name:"Tolna"},36:{code:"VA",name:"Vas"},37:{code:"VE",name:"Veszprem"},38:{code:"ZA",name:"Zala"},39:{code:"ZZ",name:"Zalaegerszeg"}},IS:{1:{code:"AL",name:"Austurland"},2:{code:"HF",name:"Hofuoborgarsvaeoi"},3:{code:"NE",name:"Norourland eystra"},4:{code:"NV",name:"Norourland vestra"},5:{code:"SL",name:"Suourland"},6:{code:"SN",name:"Suournes"},7:{code:"VF",name:"Vestfiroir"},8:{code:"VL",name:"Vesturland"}},IN:{1:{code:"AN",name:"Andaman and Nicobar Islands"},2:{code:"AP",name:"Andhra Pradesh"},3:{code:"AR",name:"Arunachal Pradesh"},4:{code:"AS",name:"Assam"},5:{code:"BI",name:"Bihar"},6:{code:"CH",name:"Chandigarh"},7:{code:"DA",name:"Dadra and Nagar Haveli"},8:{code:"DM",name:"Daman and Diu"},9:{code:"DE",name:"Delhi"},10:{code:"GO",name:"Goa"},11:{code:"GU",name:"Gujarat"},12:{code:"HA",name:"Haryana"},13:{code:"HP",name:"Himachal Pradesh"},14:{code:"JA",name:"Jammu and Kashmir"},15:{code:"KA",name:"Karnataka"},16:{code:"KE",name:"Kerala"},17:{code:"LI",name:"Lakshadweep Islands"},18:{code:"MP",name:"Madhya Pradesh"},19:{code:"MA",name:"Maharashtra"},20:{code:"MN",name:"Manipur"},21:{code:"ME",name:"Meghalaya"},22:{code:"MI",name:"Mizoram"},23:{code:"NA",name:"Nagaland"},24:{code:"OR",name:"Orissa"},25:{code:"PO",name:"Pondicherry"},26:{code:"PU",name:"Punjab"},27:{code:"RA",name:"Rajasthan"},28:{code:"SI",name:"Sikkim"},29:{code:"TN",name:"Tamil Nadu"},30:{code:"TR",name:"Tripura"},31:{code:"UP",name:"Uttar Pradesh"},32:{code:"WB",name:"West Bengal"}},ID:{1:{code:"DA",name:"Daista Aceh"},2:{code:"SU",name:"Sumatera Utara"},3:{code:"SB",name:"Sumatera Barat"},4:{code:"SI",name:"Riau"},5:{code:"JA",name:"Jambi"},6:{code:"SS",name:"Sumatera Selatan"},7:{code:"BE",name:"Bengkulu"},8:{code:"LA",name:"Lampung"},9:{code:"JK",name:"Dki Jakarta"},10:{code:"JB",name:"Jawa Barat"},11:{code:"JT",name:"Jawa Tengah"},12:{code:"DY",name:"Daista Yogyakarta"},13:{code:"JT",name:"Jawa Timur"},14:{code:"KB",name:"Kalimantan Barat"},15:{code:"KT",name:"Kalimantan Tengah"},16:{code:"KI",name:"Kalimantan Timur"},17:{code:"KS",name:"Kalimantan Selatan"},18:{code:"BA",name:"Bali"},19:{code:"NB",name:"Nusa Tenggara Barat"},20:{code:"NT",name:"Nusa Tenggara Timur"},21:{code:"SN",name:"Sulawesi 
Selatan"},22:{code:"ST",name:"Sulawesi Tengah"},23:{code:"SA",name:"Sulawesi Utara"},24:{code:"SG",name:"Sulawesi Tenggara"},25:{code:"MA",name:"Maluku"},26:{code:"MU",name:"Maluku Utara"},27:{code:"IJ",name:"Irian Jaya Timur"},28:{code:"IT",name:"Irian Jaya Tengah"},29:{code:"IB",name:"Irian Jawa Barat"},30:{code:"BT",name:"Banten"},31:{code:"BB",name:"Bangka Belitung"},32:{code:"GO",name:"Gorontalo"}},IR:{1:{code:"ARD",name:"Ardabil"},2:{code:"BSH",name:"Bushehr"},3:{code:"CMB",name:"Chahar Mahaal and Bakhtiari"},4:{code:"EAZ",name:"East Azarbaijan"},5:{code:"EFH",name:"Esfahan"},6:{code:"FAR",name:"Fars"},7:{code:"GIL",name:"Gilan"},8:{code:"GLS",name:"Golestan"},9:{code:"HMD",name:"Hamadan"},10:{code:"HRM",name:"Hormozgan"},11:{code:"ILM",name:"Ilam"},12:{code:"KBA",name:"Kohkiluyeh and Buyer Ahmad"},13:{code:"KRB",name:"Kerman"},14:{code:"KRD",name:"Kurdistan"},15:{code:"KRM",name:"Kermanshah"},16:{code:"KZT",name:"Khuzestan"},17:{code:"LRS",name:"Lorestan"},18:{code:"MKZ",name:"Markazi"},19:{code:"MZD",name:"Mazandaran"},20:{code:"NKH",name:"North Khorasan"},21:{code:"QAZ",name:"Qazvin"},22:{code:"QOM",name:"Qom"},23:{code:"RKH",name:"Razavi Khorasan"},24:{code:"SBL",name:"Sistan and Baluchistan"},25:{code:"SKH",name:"South Khorasan"},26:{code:"SMN",name:"Semnan"},27:{code:"TEH",name:"Tehran"},28:{code:"WEZ",name:"West Azarbaijan"},29:{code:"YZD",name:"Yazd"},30:{code:"ZAN",name:"Zanjan"}},IQ:{1:{code:"AB",name:"Al Anbar"},2:{code:"AL",name:"Arbil"},3:{code:"BA",name:"Al Basrah"},4:{code:"BB",name:"Babil"},5:{code:"BD",name:"Baghdad"},6:{code:"DH",name:"Dahuk"},7:{code:"DQ",name:"Dhi Qar"},8:{code:"DY",name:"Diyala"},9:{code:"KB",name:"Al Karbala"},10:{code:"MU",name:"Al Muthanna"},11:{code:"MY",name:"Maysan"},12:{code:"NJ",name:"An Najaf"},13:{code:"NN",name:"Ninawa"},14:{code:"QA",name:"Al Qadisyah"},15:{code:"SD",name:"Salah ad Din"},16:{code:"SL",name:"As Sulaymaniyah"},17:{code:"TM",name:"At Ta'mim"},18:{code:"WS",name:"Wasit"}},IE:{1:{code:"CA",name:"Carlow"},2:{code:"CV",name:"Cavan"},3:{code:"CL",name:"Clare"},4:{code:"CO",name:"Cork"},5:{code:"DO",name:"Donegal"},6:{code:"DU",name:"Dublin"},7:{code:"GA",name:"Galway"},8:{code:"KE",name:"Kerry"},9:{code:"KI",name:"Kildare"},10:{code:"KL",name:"Kilkenny"},11:{code:"LA",name:"Laois"},12:{code:"LE",name:"Leitrim"},13:{code:"LI",name:"Limerick"},14:{code:"LO",name:"Longford"},15:{code:"LU",name:"Louth"},16:{code:"MA",name:"Mayo"},17:{code:"ME",name:"Meath"},18:{code:"MO",name:"Monaghan"},19:{code:"OF",name:"Offaly"},20:{code:"RO",name:"Roscommon"},21:{code:"SL",name:"Sligo"},22:{code:"TI",name:"Tipperary"},23:{code:"WA",name:"Waterford"},24:{code:"WE",name:"Westmeath"},25:{code:"WX",name:"Wexford"},26:{code:"WI",name:"Wicklow"}},IL:{1:{code:"BS",name:"Be'er Sheva"},2:{code:"BH",name:"Bika'at Hayarden"},3:{code:"EA",name:"Eilat and Arava"},4:{code:"GA",name:"Galil"},5:{code:"HA",name:"Haifa"},6:{code:"JM",name:"Jehuda Mountains"},7:{code:"JE",name:"Jerusalem"},8:{code:"NE",name:"Negev"},10:{code:"SE",name:"Semaria"},11:{code:"SH",name:"Sharon"},12:{code:"TA",name:"Tel Aviv (Gosh Dan)"}},IT:{1:{code:"AG",name:"Agrigento"},2:{code:"AL",name:"Alessandria"},3:{code:"AN",name:"Ancona"},4:{code:"AO",name:"Aosta"},5:{code:"AR",name:"Arezzo"},6:{code:"AP",name:"Ascoli 
Piceno"},7:{code:"AT",name:"Asti"},8:{code:"AV",name:"Avellino"},9:{code:"BA",name:"Bari"},10:{code:"BL",name:"Belluno"},11:{code:"BN",name:"Benevento"},12:{code:"BG",name:"Bergamo"},13:{code:"BI",name:"Biella"},14:{code:"BO",name:"Bologna"},15:{code:"BZ",name:"Bolzano"},16:{code:"BS",name:"Brescia"},17:{code:"BR",name:"Brindisi"},18:{code:"CA",name:"Cagliari"},19:{code:"CL",name:"Caltanissetta"},20:{code:"CB",name:"Campobasso"},21:{code:"CE",name:"Caserta"},22:{code:"CT",name:"Catania"},23:{code:"CZ",name:"Catanzaro"},24:{code:"CH",name:"Chieti"},25:{code:"CO",name:"Como"},26:{code:"CS",name:"Cosenza"},27:{code:"CR",name:"Cremona"},28:{code:"KR",name:"Crotone"},29:{code:"CN",name:"Cuneo"},30:{code:"EN",name:"Enna"},31:{code:"FE",name:"Ferrara"},32:{code:"FI",name:"Firenze"},33:{code:"FG",name:"Foggia"},34:{code:"FO",name:"Forlì"},35:{code:"FR",name:"Frosinone"},36:{code:"GE",name:"Genova"},37:{code:"GO",name:"Gorizia"},38:{code:"GR",name:"Grosseto"},39:{code:"IM",name:"Imperia"},40:{code:"IS",name:"Isernia"},41:{code:"AQ",name:"Aquila"},42:{code:"SP",name:"La Spezia"},43:{code:"LT",name:"Latina"},44:{code:"LE",name:"Lecce"},45:{code:"LC",name:"Lecco"},46:{code:"LI",name:"Livorno"},47:{code:"LO",name:"Lodi"},48:{code:"LU",name:"Lucca"},49:{code:"MC",name:"Macerata"},50:{code:"MN",name:"Mantova"},51:{code:"MS",name:"Massa-Carrara"},52:{code:"MT",name:"Matera"},53:{code:"ME",name:"Messina"},54:{code:"MI",name:"Milano"},55:{code:"MO",name:"Modena"},56:{code:"NA",name:"Napoli"},57:{code:"NO",name:"Novara"},58:{code:"NU",name:"Nuoro"},59:{code:"OR",name:"Oristano"},60:{code:"PD",name:"Padova"},61:{code:"PA",name:"Palermo"},62:{code:"PR",name:"Parma"},63:{code:"PG",name:"Perugia"},64:{code:"PV",name:"Pavia"},65:{code:"PU",name:"Pesaro Urbino"},66:{code:"PE",name:"Pescara"},67:{code:"PC",name:"Piacenza"},68:{code:"PI",name:"Pisa"},69:{code:"PT",name:"Pistoia"},70:{code:"PN",name:"Pordenone"},71:{code:"PZ",name:"Potenza"},72:{code:"PO",name:"Prato"},73:{code:"RG",name:"Ragusa"},74:{code:"RA",name:"Ravenna"},75:{code:"RC",name:"Reggio Calabria"},76:{code:"RE",name:"Reggio Emilia"},77:{code:"RI",name:"Rieti"},78:{code:"RN",name:"Rimini"},79:{code:"RM",name:"Roma"},80:{code:"RO",name:"Rovigo"},81:{code:"SA",name:"Salerno"},82:{code:"SS",name:"Sassari"},83:{code:"SV",name:"Savona"},84:{code:"SI",name:"Siena"},85:{code:"SR",name:"Siracusa"},86:{code:"SO",name:"Sondrio"},87:{code:"TA",name:"Taranto"},88:{code:"TE",name:"Teramo"},89:{code:"TR",name:"Terni"},90:{code:"TO",name:"Torino"},91:{code:"TP",name:"Trapani"},92:{code:"TN",name:"Trento"},93:{code:"TV",name:"Treviso"},94:{code:"TS",name:"Trieste"},95:{code:"UD",name:"Udine"},96:{code:"VA",name:"Varese"},97:{code:"VE",name:"Venezia"},98:{code:"VB",name:"Verbania"},99:{code:"VC",name:"Vercelli"},100:{code:"VR",name:"Verona"},101:{code:"VV",name:"Vibo Valentia"},102:{code:"VI",name:"Vicenza"},103:{code:"VT",name:"Viterbo"},104:{code:"CI",name:"Carbonia-Iglesias"},105:{code:"VS",name:"Medio Campidano"},106:{code:"OG",name:"Ogliastra"},107:{code:"OT",name:"Olbia-Tempio"},108:{code:"MB",name:"Monza e Brianza"},109:{code:"FM",name:"Fermo"},110:{code:"BT",name:"Barletta-Andria-Trani"}},JM:{1:{code:"CLA",name:"Clarendon Parish"},2:{code:"HAN",name:"Hanover Parish"},3:{code:"KIN",name:"Kingston Parish"},4:{code:"MAN",name:"Manchester Parish"},5:{code:"POR",name:"Portland Parish"},6:{code:"AND",name:"Saint Andrew Parish"},7:{code:"ANN",name:"Saint Ann Parish"},8:{code:"CAT",name:"Saint Catherine Parish"},9:{code:"ELI",name:"Saint Elizabeth 
Parish"},10:{code:"JAM",name:"Saint James Parish"},11:{code:"MAR",name:"Saint Mary Parish"},12:{code:"THO",name:"Saint Thomas Parish"},13:{code:"TRL",name:"Trelawny Parish"},14:{code:"WML",name:"Westmoreland Parish"}},JP:{1:{code:"AI",name:"Aichi"},2:{code:"AK",name:"Akita"},3:{code:"AO",name:"Aomori"},4:{code:"CH",name:"Chiba"},5:{code:"EH",name:"Ehime"},6:{code:"FK",name:"Fukui"},7:{code:"FU",name:"Fukuoka"},8:{code:"FS",name:"Fukushima"},9:{code:"GI",name:"Gifu"},10:{code:"GU",name:"Gumma"},11:{code:"HI",name:"Hiroshima"},12:{code:"HO",name:"Hokkaido"},13:{code:"HY",name:"Hyogo"},14:{code:"IB",name:"Ibaraki"},15:{code:"IS",name:"Ishikawa"},16:{code:"IW",name:"Iwate"},17:{code:"KA",name:"Kagawa"},18:{code:"KG",name:"Kagoshima"},19:{code:"KN",name:"Kanagawa"},20:{code:"KO",name:"Kochi"},21:{code:"KU",name:"Kumamoto"},22:{code:"KY",name:"Kyoto"},23:{code:"MI",name:"Mie"},24:{code:"MY",name:"Miyagi"},25:{code:"MZ",name:"Miyazaki"},26:{code:"NA",name:"Nagano"},27:{code:"NG",name:"Nagasaki"},28:{code:"NR",name:"Nara"},29:{code:"NI",name:"Niigata"},30:{code:"OI",name:"Oita"},31:{code:"OK",name:"Okayama"},32:{code:"ON",name:"Okinawa"},33:{code:"OS",name:"Osaka"},34:{code:"SA",name:"Saga"},35:{code:"SI",name:"Saitama"},36:{code:"SH",name:"Shiga"},37:{code:"SM",name:"Shimane"},38:{code:"SZ",name:"Shizuoka"},39:{code:"TO",name:"Tochigi"},40:{code:"TS",name:"Tokushima"},41:{code:"TK",name:"Tokyo"},42:{code:"TT",name:"Tottori"},43:{code:"TY",name:"Toyama"},44:{code:"WA",name:"Wakayama"},45:{code:"YA",name:"Yamagata"},46:{code:"YM",name:"Yamaguchi"},47:{code:"YN",name:"Yamanashi"}},JO:{1:{code:"AM",name:"'Amman"},2:{code:"AJ",name:"Ajlun"},3:{code:"AA",name:"Al'Aqabah"},4:{code:"AB",name:"Al Balqa'"},5:{code:"AK",name:"Al Karak"},6:{code:"AL",name:"Al Mafraq"},7:{code:"AT",name:"At Tafilah"},8:{code:"AZ",name:"Az Zarqa'"},9:{code:"IR",name:"Irbid"},10:{code:"JA",name:"Jarash"},11:{code:"MA",name:"Ma'an"},12:{code:"MD",name:"Madaba"}},KZ:{1:{code:"AL",name:"Almaty"},2:{code:"AC",name:"Almaty City"},3:{code:"AM",name:"Aqmola"},4:{code:"AQ",name:"Aqtobe"},5:{code:"AS",name:"Astana City"},6:{code:"AT",name:"Atyrau"},7:{code:"BA",name:"Batys Qazaqstan"},8:{code:"BY",name:"Bayqongyr City"},9:{code:"MA",name:"Mangghystau"},10:{code:"ON",name:"Ongtustik Qazaqstan"},11:{code:"PA",name:"Pavlodar"},12:{code:"QA",name:"Qaraghandy"},13:{code:"QO",name:"Qostanay"},14:{code:"QY",name:"Qyzylorda"},15:{code:"SH",name:"Shyghys Qazaqstan"},16:{code:"SO",name:"Soltustik Qazaqstan"},17:{code:"ZH",name:"Zhambyl"}},KE:{1:{code:"CE",name:"Central"},2:{code:"CO",name:"Coast"},3:{code:"EA",name:"Eastern"},4:{code:"NA",name:"Nairobi Area"},5:{code:"NE",name:"North Eastern"},6:{code:"NY",name:"Nyanza"},7:{code:"RV",name:"Rift 
Valley"},8:{code:"WE",name:"Western"}},KI:{1:{code:"AG",name:"Abaiang"},2:{code:"AM",name:"Abemama"},3:{code:"AK",name:"Aranuka"},4:{code:"AO",name:"Arorae"},5:{code:"BA",name:"Banaba"},6:{code:"BE",name:"Beru"},7:{code:"bT",name:"Butaritari"},8:{code:"KA",name:"Kanton"},9:{code:"KR",name:"Kiritimati"},10:{code:"KU",name:"Kuria"},11:{code:"MI",name:"Maiana"},12:{code:"MN",name:"Makin"},13:{code:"ME",name:"Marakei"},14:{code:"NI",name:"Nikunau"},15:{code:"NO",name:"Nonouti"},16:{code:"ON",name:"Onotoa"},17:{code:"TT",name:"Tabiteuea"},18:{code:"TR",name:"Tabuaeran"},19:{code:"TM",name:"Tamana"},20:{code:"TW",name:"Tarawa"},21:{code:"TE",name:"Teraina"}},KP:{1:{code:"CHA",name:"Chagang-do"},2:{code:"HAB",name:"Hamgyong-bukto"},3:{code:"HAN",name:"Hamgyong-namdo"},4:{code:"HWB",name:"Hwanghae-bukto"},5:{code:"HWN",name:"Hwanghae-namdo"},6:{code:"KAN",name:"Kangwon-do"},7:{code:"PYB",name:"P'yongan-bukto"},8:{code:"PYN",name:"P'yongan-namdo"},9:{code:"YAN",name:"Ryanggang-do (Yanggang-do)"},10:{code:"NAJ",name:"Rason Directly Governed City"},11:{code:"PYO",name:"P'yongyang Special City"}},KR:{1:{code:"CO",name:"Ch'ungch'ong-bukto"},2:{code:"CH",name:"Ch'ungch'ong-namdo"},3:{code:"CD",name:"Cheju-do"},4:{code:"CB",name:"Cholla-bukto"},5:{code:"CN",name:"Cholla-namdo"},6:{code:"IG",name:"Inch'on-gwangyoksi"},7:{code:"KA",name:"Kangwon-do"},8:{code:"KG",name:"Kwangju-gwangyoksi"},9:{code:"KD",name:"Kyonggi-do"},10:{code:"KB",name:"Kyongsang-bukto"},11:{code:"KN",name:"Kyongsang-namdo"},12:{code:"PG",name:"Pusan-gwangyoksi"},13:{code:"SO",name:"Soul-t'ukpyolsi"},14:{code:"TA",name:"Taegu-gwangyoksi"},15:{code:"TG",name:"Taejon-gwangyoksi"}},KW:{1:{code:"AL",name:"Al'Asimah"},2:{code:"AA",name:"Al Ahmadi"},3:{code:"AF",name:"Al Farwaniyah"},4:{code:"AJ",name:"Al Jahra'"},5:{code:"HA",name:"Hawalli"}},KG:{1:{code:"GB",name:"Bishkek"},2:{code:"B",name:"Batken"},3:{code:"C",name:"Chu"},4:{code:"J",name:"Jalal-Abad"},5:{code:"N",name:"Naryn"},6:{code:"O",name:"Osh"},7:{code:"T",name:"Talas"},8:{code:"Y",name:"Ysyk-Kol"}},LA:{1:{code:"VT",name:"Vientiane"},2:{code:"AT",name:"Attapu"},3:{code:"BK",name:"Bokeo"},4:{code:"BL",name:"Bolikhamxai"},5:{code:"CH",name:"Champasak"},6:{code:"HO",name:"Houaphan"},7:{code:"KH",name:"Khammouan"},8:{code:"LM",name:"Louang Namtha"},9:{code:"LP",name:"Louangphabang"},10:{code:"OU",name:"Oudomxai"},11:{code:"PH",name:"Phongsali"},12:{code:"SL",name:"Salavan"},13:{code:"SV",name:"Savannakhet"},14:{code:"VI",name:"Vientiane"},15:{code:"XA",name:"Xaignabouli"},16:{code:"XE",name:"Xekong"},17:{code:"XI",name:"Xiangkhoang"},18:{code:"XN",name:"Xaisomboun"}},LV:{1:{code:"AIZ",name:"Aizkraukles Rajons"},2:{code:"ALU",name:"Aluksnes Rajons"},3:{code:"BAL",name:"Balvu Rajons"},4:{code:"BAU",name:"Bauskas Rajons"},5:{code:"CES",name:"Cesu Rajons"},6:{code:"DGR",name:"Daugavpils Rajons"},7:{code:"DOB",name:"Dobeles Rajons"},8:{code:"GUL",name:"Gulbenes Rajons"},9:{code:"JEK",name:"Jekabpils Rajons"},10:{code:"JGR",name:"Jelgavas Rajons"},11:{code:"KRA",name:"Kraslavas Rajons"},12:{code:"KUL",name:"Kuldigas Rajons"},13:{code:"LPR",name:"Liepajas Rajons"},14:{code:"LIM",name:"Limbazu Rajons"},15:{code:"LUD",name:"Ludzas Rajons"},16:{code:"MAD",name:"Madonas Rajons"},17:{code:"OGR",name:"Ogres Rajons"},18:{code:"PRE",name:"Preilu Rajons"},19:{code:"RZR",name:"Rezeknes Rajons"},20:{code:"RGR",name:"Rigas Rajons"},21:{code:"SAL",name:"Saldus Rajons"},22:{code:"TAL",name:"Talsu Rajons"},23:{code:"TUK",name:"Tukuma Rajons"},24:{code:"VLK",name:"Valkas 
Rajons"},25:{code:"VLM",name:"Valmieras Rajons"},26:{code:"VSR",name:"Ventspils Rajons"},27:{code:"DGV",name:"Daugavpils"},28:{code:"JGV",name:"Jelgava"},29:{code:"JUR",name:"Jurmala"},30:{code:"LPK",name:"Liepaja"},31:{code:"RZK",name:"Rezekne"},32:{code:"RGA",name:"Riga"},33:{code:"VSL",name:"Ventspils"}},LB:{1:{code:"BIN",name:"Bint Jbeil"},2:{code:"HAS",name:"Hasbaya"},3:{code:"MAR",name:"Marjeyoun"},4:{code:"NAB",name:"Nabatieh"},5:{code:"BAA",name:"Baalbek"},6:{code:"HER",name:"Hermel"},7:{code:"RAS",name:"Rashaya"},8:{code:"WES",name:"Western Beqaa"},9:{code:"ZAH",name:"Zahle"},10:{code:"AKK",name:"Akkar"},11:{code:"BAT",name:"Batroun"},12:{code:"BSH",name:"Bsharri"},13:{code:"KOU",name:"Koura"},14:{code:"MIN",name:"Miniyeh-Danniyeh"},15:{code:"TRI",name:"Tripoli"},16:{code:"ZGH",name:"Zgharta"},17:{code:"ALE",name:"Aley"},18:{code:"BAA",name:"Baabda"},19:{code:"BYB",name:"Byblos"},20:{code:"CHO",name:"Chouf"},21:{code:"KES",name:"Kesrwan"},22:{code:"MAT",name:"Matn"},23:{code:"JEZ",name:"Jezzine"},24:{code:"SID",name:"Sidon"},25:{code:"TYR",name:"Tyre"}},LS:{1:{code:"BE",name:"Berea"},2:{code:"BB",name:"Butha-Buthe"},3:{code:"LE",name:"Leribe"},4:{code:"MF",name:"Mafeteng"},5:{code:"MS",name:"Maseru"},6:{code:"MH",name:"Mohale's Hoek"},7:{code:"MK",name:"Mokhotlong"},8:{code:"QN",name:"Qacha's Nek"},9:{code:"QT",name:"Quthing"},10:{code:"TT",name:"Thaba-Tseka"}},LR:{1:{code:"BI",name:"Bomi"},2:{code:"BG",name:"Bong"},3:{code:"GB",name:"Grand Bassa"},4:{code:"CM",name:"Grand Cape Mount"},5:{code:"GG",name:"Grand Gedeh"},6:{code:"GK",name:"Grand Kru"},7:{code:"LO",name:"Lofa"},8:{code:"MG",name:"Margibi"},9:{code:"ML",name:"Maryland"},10:{code:"MS",name:"Montserrado"},11:{code:"NB",name:"Nimba"},12:{code:"RC",name:"River Cess"},13:{code:"SN",name:"Sinoe"}},LY:{1:{code:"AJ",name:"Ajdabiya"},2:{code:"AZ",name:"Al 'Aziziyah"},3:{code:"FA",name:"Al Fatih"},4:{code:"JA",name:"Al Jabal al Akhdar"},5:{code:"JU",name:"Al Jufrah"},6:{code:"KH",name:"Al Khums"},7:{code:"KU",name:"Al Kufrah"},8:{code:"NK",name:"An Nuqat al Khams"},9:{code:"AS",name:"Ash Shati'"},10:{code:"AW",name:"Awbari"},11:{code:"ZA",name:"Az Zawiyah"},12:{code:"BA",name:"Banghazi"},13:{code:"DA",name:"Darnah"},14:{code:"GD",name:"Ghadamis"},15:{code:"GY",name:"Gharyan"},16:{code:"MI",name:"Misratah"},17:{code:"MZ",name:"Murzuq"},18:{code:"SB",name:"Sabha"},19:{code:"SW",name:"Sawfajjin"},20:{code:"SU",name:"Surt"},21:{code:"TL",name:"Tarabulus 
(Tripoli)"},22:{code:"TH",name:"Tarhunah"},23:{code:"TU",name:"Tubruq"},24:{code:"YA",name:"Yafran"},25:{code:"ZL",name:"Zlitan"}},LI:{1:{code:"V",name:"Vaduz"},2:{code:"A",name:"Schaan"},3:{code:"B",name:"Balzers"},4:{code:"N",name:"Triesen"},5:{code:"E",name:"Eschen"},6:{code:"M",name:"Mauren"},7:{code:"T",name:"Triesenberg"},8:{code:"R",name:"Ruggell"},9:{code:"G",name:"Gamprin"},10:{code:"L",name:"Schellenberg"},11:{code:"P",name:"Planken"}},LT:{1:{code:"AL",name:"Alytus"},2:{code:"KA",name:"Kaunas"},3:{code:"KL",name:"Klaipeda"},4:{code:"MA",name:"Marijampole"},5:{code:"PA",name:"Panevezys"},6:{code:"SI",name:"Siauliai"},7:{code:"TA",name:"Taurage"},8:{code:"TE",name:"Telsiai"},9:{code:"UT",name:"Utena"},10:{code:"VI",name:"Vilnius"}},LU:{1:{code:"DD",name:"Diekirch"},2:{code:"DC",name:"Clervaux"},3:{code:"DR",name:"Redange"},4:{code:"DV",name:"Vianden"},5:{code:"DW",name:"Wiltz"},6:{code:"GG",name:"Grevenmacher"},7:{code:"GE",name:"Echternach"},8:{code:"GR",name:"Remich"},9:{code:"LL",name:"Luxembourg"},10:{code:"LC",name:"Capellen"},11:{code:"LE",name:"Esch-sur-Alzette"},12:{code:"LM",name:"Mersch"}},MO:{1:{code:"OLF",name:"Our Lady Fatima Parish"},2:{code:"ANT",name:"St. Anthony Parish"},3:{code:"LAZ",name:"St. Lazarus Parish"},4:{code:"CAT",name:"Cathedral Parish"},5:{code:"LAW",name:"St. Lawrence Parish"}},MK:{1:{code:"AER",name:"Aerodrom"},2:{code:"ARA",name:"Aračinovo"},3:{code:"BER",name:"Berovo"},4:{code:"BIT",name:"Bitola"},5:{code:"BOG",name:"Bogdanci"},6:{code:"BOG",name:"Bogovinje"},7:{code:"BOS",name:"Bosilovo"},8:{code:"BRV",name:"Brvenica"},9:{code:"BUT",name:"Butel"},10:{code:"ČAI",name:"Čair"},11:{code:"ČAš",name:"Čaška"},12:{code:"CEN",name:"Centar"},13:{code:"CEN",name:"Centar Župa"},14:{code:"Češ",name:"Češinovo-Obleš"},15:{code:"ČUČ",name:"Čučer-Sandevo"},16:{code:"DEB",name:"Debar"},17:{code:"DEB",name:"Debarca"},18:{code:"DEL",name:"Delčevo"},19:{code:"DEM",name:"Demir Hisar"},20:{code:"DEM",name:"Demir Kapija"},21:{code:"DOL",name:"Dolneni"},22:{code:"DRU",name:"Drugovo"},23:{code:"GAZ",name:"Gazi Baba"},24:{code:"GEV",name:"Gevgelija"},25:{code:"GJO",name:"Gjorče Petrov"},26:{code:"GOS",name:"Gostivar"},27:{code:"GRA",name:"Gradsko"},28:{code:"ILI",name:"Ilinden"},29:{code:"JEG",name:"Jegunovce"},30:{code:"KAR",name:"Karbinci"},31:{code:"KAR",name:"Karpoš"},32:{code:"KAV",name:"Kavadarci"},33:{code:"KIČ",name:"Kičevo"},34:{code:"KIS",name:"Kisela Voda"},35:{code:"KOč",name:"Kočani"},36:{code:"KON",name:"Konče"},37:{code:"KRA",name:"Kratovo"},38:{code:"KRI",name:"Kriva Palanka"},39:{code:"KRI",name:"Krivogaštani"},40:{code:"KRU",name:"Kruševo"},41:{code:"KUM",name:"Kumanovo"},42:{code:"LIP",name:"Lipkovo"},43:{code:"LOZ",name:"Lozovo"},44:{code:"MAK",name:"Makedonska Kamenica"},45:{code:"MAK",name:"Makedonski Brod"},46:{code:"MAV",name:"Mavrovo and Rostuša"},47:{code:"MOG",name:"Mogila"},48:{code:"NEG",name:"Negotino"},49:{code:"NOV",name:"Novaci"},50:{code:"NOV",name:"Novo Selo"},51:{code:"OHR",name:"Ohrid"},52:{code:"OSL",name:"Oslomej"},53:{code:"PEH",name:"Pehčevo"},54:{code:"PET",name:"Petrovec"},55:{code:"PLA",name:"Plasnica"},56:{code:"PRI",name:"Prilep"},57:{code:"PRO",name:"Probištip"},58:{code:"RAD",name:"Radoviš"},59:{code:"RAN",name:"Rankovce"},60:{code:"RES",name:"Resen"},61:{code:"ROS",name:"Rosoman"},62:{code:"SAR",name:"Saraj"},63:{code:"SOP",name:"Sopište"},64:{code:"STA",name:"Star Dojran"},65:{code:"STA",name:"Staro 
Nagoričane"},66:{code:"ŠTI",name:"Štip"},67:{code:"STR",name:"Struga"},68:{code:"STR",name:"Strumica"},69:{code:"STU",name:"Studeničani"},70:{code:"ŠUT",name:"Šuto Orizari"},71:{code:"SVE",name:"Sveti Nikole"},72:{code:"TEA",name:"Tearce"},73:{code:"TET",name:"Tetovo"},74:{code:"VAL",name:"Valandovo"},75:{code:"VAS",name:"Vasilevo"},76:{code:"VEL",name:"Veles"},77:{code:"VEV",name:"Vevčani"},78:{code:"VIN",name:"Vinica"},79:{code:"VRA",name:"Vraneštica"},80:{code:"VRA",name:"Vrapčište"},81:{code:"ZAJ",name:"Zajas"},82:{code:"ZEL",name:"Zelenikovo"},83:{code:"ŽEL",name:"Želino"},84:{code:"ZRN",name:"Zrnovci"}},MG:{1:{code:"AN",name:"Antananarivo"},2:{code:"AS",name:"Antsiranana"},3:{code:"FN",name:"Fianarantsoa"},4:{code:"MJ",name:"Mahajanga"},5:{code:"TM",name:"Toamasina"},6:{code:"TL",name:"Toliara"}},MW:{1:{code:"BLK",name:"Balaka"},2:{code:"BLT",name:"Blantyre"},3:{code:"CKW",name:"Chikwawa"},4:{code:"CRD",name:"Chiradzulu"},5:{code:"CTP",name:"Chitipa"},6:{code:"DDZ",name:"Dedza"},7:{code:"DWA",name:"Dowa"},8:{code:"KRG",name:"Karonga"},9:{code:"KSG",name:"Kasungu"},10:{code:"LKM",name:"Likoma"},11:{code:"LLG",name:"Lilongwe"},12:{code:"MCG",name:"Machinga"},13:{code:"MGC",name:"Mangochi"},14:{code:"MCH",name:"Mchinji"},15:{code:"MLJ",name:"Mulanje"},16:{code:"MWZ",name:"Mwanza"},17:{code:"MZM",name:"Mzimba"},18:{code:"NTU",name:"Ntcheu"},19:{code:"NKB",name:"Nkhata Bay"},20:{code:"NKH",name:"Nkhotakota"},21:{code:"NSJ",name:"Nsanje"},22:{code:"NTI",name:"Ntchisi"},23:{code:"PHL",name:"Phalombe"},24:{code:"RMP",name:"Rumphi"},25:{code:"SLM",name:"Salima"},26:{code:"THY",name:"Thyolo"},27:{code:"ZBA",name:"Zomba"}},MY:{1:{code:"Johor",name:"Johor"},2:{code:"Kedah",name:"Kedah"},3:{code:"Kelantan",name:"Kelantan"},4:{code:"Labuan",name:"Labuan"},5:{code:"Melaka",name:"Melaka"},6:{code:"Negeri Sembilan",name:"Negeri Sembilan"},7:{code:"Pahang",name:"Pahang"},8:{code:"Perak",name:"Perak"},9:{code:"Perlis",name:"Perlis"},10:{code:"Pulau Pinang",name:"Pulau Pinang"},11:{code:"Sabah",name:"Sabah"},12:{code:"Sarawak",name:"Sarawak"},13:{code:"Selangor",name:"Selangor"},14:{code:"Terengganu",name:"Terengganu"},15:{code:"Kuala Lumpur",name:"Kuala Lumpur"}},MV:{1:{code:"AAD",name:"Ari Atoll Dheknu"},2:{code:"AAU",name:"Ari Atoll Uthuru"},3:{code:"ADD",name:"Addu"},4:{code:"FAA",name:"Faadhippolhu"},5:{code:"FEA",name:"Felidhe Atoll"},6:{code:"FMU",name:"Fua Mulaku"},7:{code:"HAD",name:"Huvadhu Atoll Dhekunu"},8:{code:"HAU",name:"Huvadhu Atoll Uthuru"},9:{code:"HDH",name:"Hadhdhunmathi"},10:{code:"KLH",name:"Kolhumadulu"},11:{code:"MAA",name:"Male Atoll"},12:{code:"MAD",name:"Maalhosmadulu Dhekunu"},13:{code:"MAU",name:"Maalhosmadulu Uthuru"},14:{code:"MLD",name:"Miladhunmadulu Dhekunu"},15:{code:"MLU",name:"Miladhunmadulu Uthuru"},16:{code:"MUA",name:"Mulaku Atoll"},17:{code:"NAD",name:"Nilandhe Atoll Dhekunu"},18:{code:"NAU",name:"Nilandhe Atoll Uthuru"},19:{code:"THD",name:"Thiladhunmathi Dhekunu"},20:{code:"THU",name:"Thiladhunmathi Uthuru"}},ML:{1:{code:"GA",name:"Gao"},2:{code:"KY",name:"Kayes"},3:{code:"KD",name:"Kidal"},4:{code:"KL",name:"Koulikoro"},5:{code:"MP",name:"Mopti"},6:{code:"SG",name:"Segou"},7:{code:"SK",name:"Sikasso"},8:{code:"TB",name:"Tombouctou"},9:{code:"CD",name:"Bamako Capital 
District"}},MT:{1:{code:"ATT",name:"Attard"},2:{code:"BAL",name:"Balzan"},3:{code:"BGU",name:"Birgu"},4:{code:"BKK",name:"Birkirkara"},5:{code:"BRZ",name:"Birzebbuga"},6:{code:"BOR",name:"Bormla"},7:{code:"DIN",name:"Dingli"},8:{code:"FGU",name:"Fgura"},9:{code:"FLO",name:"Floriana"},10:{code:"GDJ",name:"Gudja"},11:{code:"GZR",name:"Gzira"},12:{code:"GRG",name:"Gargur"},13:{code:"GXQ",name:"Gaxaq"},14:{code:"HMR",name:"Hamrun"},15:{code:"IKL",name:"Iklin"},16:{code:"ISL",name:"Isla"},17:{code:"KLK",name:"Kalkara"},18:{code:"KRK",name:"Kirkop"},19:{code:"LIJ",name:"Lija"},20:{code:"LUQ",name:"Luqa"},21:{code:"MRS",name:"Marsa"},22:{code:"MKL",name:"Marsaskala"},23:{code:"MXL",name:"Marsaxlokk"},24:{code:"MDN",name:"Mdina"},25:{code:"MEL",name:"Melliea"},26:{code:"MGR",name:"Mgarr"},27:{code:"MST",name:"Mosta"},28:{code:"MQA",name:"Mqabba"},29:{code:"MSI",name:"Msida"},30:{code:"MTF",name:"Mtarfa"},31:{code:"NAX",name:"Naxxar"},32:{code:"PAO",name:"Paola"},33:{code:"PEM",name:"Pembroke"},34:{code:"PIE",name:"Pieta"},35:{code:"QOR",name:"Qormi"},36:{code:"QRE",name:"Qrendi"},37:{code:"RAB",name:"Rabat"},38:{code:"SAF",name:"Safi"},39:{code:"SGI",name:"San Giljan"},40:{code:"SLU",name:"Santa Lucija"},41:{code:"SPB",name:"San Pawl il-Bahar"},42:{code:"SGW",name:"San Gwann"},43:{code:"SVE",name:"Santa Venera"},44:{code:"SIG",name:"Siggiewi"},45:{code:"SLM",name:"Sliema"},46:{code:"SWQ",name:"Swieqi"},47:{code:"TXB",name:"Ta Xbiex"},48:{code:"TRX",name:"Tarxien"},49:{code:"VLT",name:"Valletta"},50:{code:"XGJ",name:"Xgajra"},51:{code:"ZBR",name:"Zabbar"},52:{code:"ZBG",name:"Zebbug"},53:{code:"ZJT",name:"Zejtun"},54:{code:"ZRQ",name:"Zurrieq"},55:{code:"FNT",name:"Fontana"},56:{code:"GHJ",name:"Ghajnsielem"},57:{code:"GHR",name:"Gharb"},58:{code:"GHS",name:"Ghasri"},59:{code:"KRC",name:"Kercem"},60:{code:"MUN",name:"Munxar"},61:{code:"NAD",name:"Nadur"},62:{code:"QAL",name:"Qala"},63:{code:"VIC",name:"Victoria"},64:{code:"SLA",name:"San Lawrenz"},65:{code:"SNT",name:"Sannat"},66:{code:"ZAG",name:"Xagra"},67:{code:"XEW",name:"Xewkija"},68:{code:"ZEB",name:"Zebbug"}},MH:{1:{code:"ALG",name:"Ailinginae"},2:{code:"ALL",name:"Ailinglaplap"},3:{code:"ALK",name:"Ailuk"},4:{code:"ARN",name:"Arno"},5:{code:"AUR",name:"Aur"},6:{code:"BKR",name:"Bikar"},7:{code:"BKN",name:"Bikini"},8:{code:"BKK",name:"Bokak"},9:{code:"EBN",name:"Ebon"},10:{code:"ENT",name:"Enewetak"},11:{code:"EKB",name:"Erikub"},12:{code:"JBT",name:"Jabat"},13:{code:"JLT",name:"Jaluit"},14:{code:"JEM",name:"Jemo"},15:{code:"KIL",name:"Kili"},16:{code:"KWJ",name:"Kwajalein"},17:{code:"LAE",name:"Lae"},18:{code:"LIB",name:"Lib"},19:{code:"LKP",name:"Likiep"},20:{code:"MJR",name:"Majuro"},21:{code:"MLP",name:"Maloelap"},22:{code:"MJT",name:"Mejit"},23:{code:"MIL",name:"Mili"},24:{code:"NMK",name:"Namorik"},25:{code:"NAM",name:"Namu"},26:{code:"RGL",name:"Rongelap"},27:{code:"RGK",name:"Rongrik"},28:{code:"TOK",name:"Toke"},29:{code:"UJA",name:"Ujae"},30:{code:"UJL",name:"Ujelang"},31:{code:"UTK",name:"Utirik"},32:{code:"WTH",name:"Wotho"},33:{code:"WTJ",name:"Wotje"}},MQ:{1:{code:"LAJ",name:"L'Ajoupa-Bouillon"},2:{code:"LES",name:"Les Anses-d'Arlet"},3:{code:"BAS",name:"Basse-Pointe"},4:{code:"BEL",name:"Bellefontaine"},5:{code:"LE",name:"Le Carbet"},6:{code:"CAS",name:"Case-Pilote"},7:{code:"LE",name:"Le Diamant"},8:{code:"DUC",name:"Ducos"},9:{code:"FON",name:"Fonds-Saint-Denis"},10:{code:"FOR",name:"Fort-De-France"},11:{code:"LE",name:"Le 
François"},12:{code:"GRA",name:"Grand'Rivière"},13:{code:"GRO",name:"Gros-Morne"},14:{code:"LE",name:"Le Lamentin"},15:{code:"LE",name:"Le Lorrain"},16:{code:"MAC",name:"Macouba"},17:{code:"LE",name:"Le Marigot"},18:{code:"LE",name:"Le Marin"},19:{code:"LE",name:"Le Morne-Rouge"},20:{code:"LE",name:"Le Morne-Vert"},21:{code:"LE",name:"Le Prêcheur"},22:{code:"RIV",name:"Rivière-Pilote"},23:{code:"RIV",name:"Rivière-Salée"},24:{code:"LE",name:"Le Robert"},25:{code:"SAI",name:"Sainte-Anne"},26:{code:"SAI",name:"Sainte-Luce"},27:{code:"SAI",name:"Sainte-Marie"},28:{code:"SAI",name:"Saint-Esprit"},29:{code:"SAI",name:"Saint-Joseph"},30:{code:"SAI",name:"Saint-Pierre"},31:{code:"SCH",name:"Schœlcher"},32:{code:"LA",name:"La Trinité"},33:{code:"LES",name:"Les Trois-Îlets"},34:{code:"LE",name:"Le Vauclin"}},MR:{1:{code:"AD",name:"Adrar"},2:{code:"AS",name:"Assaba"},3:{code:"BR",name:"Brakna"},4:{code:"DN",name:"Dakhlet Nouadhibou"},5:{code:"GO",name:"Gorgol"},6:{code:"GM",name:"Guidimaka"},7:{code:"HC",name:"Hodh Ech Chargui"},8:{code:"HG",name:"Hodh El Gharbi"},9:{code:"IN",name:"Inchiri"},10:{code:"TA",name:"Tagant"},11:{code:"TZ",name:"Tiris Zemmour"},12:{code:"TR",name:"Trarza"},13:{code:"NO",name:"Nouakchott"}},MU:{1:{code:"AG",name:"Agalega Islands"},2:{code:"BL",name:"Black River"},3:{code:"BR",name:"Beau Bassin-Rose Hill"},4:{code:"CC",name:"Cargados Carajos Shoals (Saint B)"},5:{code:"CU",name:"Curepipe"},6:{code:"FL",name:"Flacq"},7:{code:"GP",name:"Grand Port"},8:{code:"MO",name:"Moka"},9:{code:"PA",name:"Pamplemousses"},10:{code:"PL",name:"Port Louis"},11:{code:"PU",name:"Port Louis"},12:{code:"PW",name:"Plaines Wilhems"},13:{code:"QB",name:"Quatre Bornes"},14:{code:"RO",name:"Rodrigues"},15:{code:"RR",name:"Riviere du Rempart"},16:{code:"SA",name:"Savanne"},17:{code:"VP",name:"Vacoas-Phoenix"}},YT:{1:{code:"DZA",name:"Dzaoudzi"},2:{code:"PAM",name:"Pamandzi"},3:{code:"MAM",name:"Mamoudzou"},4:{code:"DEM",name:"Dembeni"},5:{code:"BAN",name:"Bandrele"},6:{code:"KAN",name:"Kani-Kéli"},7:{code:"BOU",name:"Bouéni"},8:{code:"CHI",name:"Chirongui"},9:{code:"SAD",name:"Sada"},10:{code:"OUA",name:"Ouangani"},11:{code:"CHI",name:"Chiconi"},12:{code:"TSI",name:"Tsingoni"},13:{code:"MTS",name:"M'Tsangamouji"},14:{code:"ACO",name:"Acoua"},15:{code:"MTS",name:"Mtsamboro"},16:{code:"BAN",name:"Bandraboua"},17:{code:"KOU",name:"Koungou"}},MX:{1:{code:"AGU",name:"Aguascalientes"},2:{code:"BCN",name:"Baja California Norte"},3:{code:"BCS",name:"Baja California Sur"},4:{code:"CAM",name:"Campeche"},5:{code:"CHP",name:"Chiapas"},6:{code:"CHH",name:"Chihuahua"},7:{code:"COA",name:"Coahuila de Zaragoza"},8:{code:"COL",name:"Colima"},9:{code:"DIF",name:"Distrito Federal"},10:{code:"DUR",name:"Durango"},11:{code:"GUA",name:"Guanajuato"},12:{code:"GRO",name:"Guerrero"},13:{code:"HID",name:"Hidalgo"},14:{code:"JAL",name:"Jalisco"},15:{code:"MEX",name:"Mexico"},16:{code:"MIC",name:"Michoacan de Ocampo"},17:{code:"MOR",name:"Morelos"},18:{code:"NAY",name:"Nayarit"},19:{code:"NLE",name:"Nuevo Leon"},20:{code:"OAX",name:"Oaxaca"},21:{code:"PUE",name:"Puebla"},22:{code:"QUE",name:"Queretaro de Arteaga"},23:{code:"ROO",name:"Quintana Roo"},24:{code:"SLP",name:"San Luis 
Potosi"},25:{code:"SIN",name:"Sinaloa"},26:{code:"SON",name:"Sonora"},27:{code:"TAB",name:"Tabasco"},28:{code:"TAM",name:"Tamaulipas"},29:{code:"TLA",name:"Tlaxcala"},30:{code:"VER",name:"Veracruz-Llave"},31:{code:"YUC",name:"Yucatan"},32:{code:"ZAC",name:"Zacatecas"}},FM:{1:{code:"C",name:"Chuuk"},2:{code:"K",name:"Kosrae"},3:{code:"P",name:"Pohnpei"},4:{code:"Y",name:"Yap"}},MD:{1:{code:"GA",name:"Gagauzia"},2:{code:"CU",name:"Chisinau"},3:{code:"BA",name:"Balti"},4:{code:"CA",name:"Cahul"},5:{code:"ED",name:"Edinet"},6:{code:"LA",name:"Lapusna"},7:{code:"OR",name:"Orhei"},8:{code:"SO",name:"Soroca"},9:{code:"TI",name:"Tighina"},10:{code:"UN",name:"Ungheni"},11:{code:"SN",name:"Stânga Nistrului"}},MC:{1:{code:"FV",name:"Fontvieille"},2:{code:"LC",name:"La Condamine"},3:{code:"MV",name:"Monaco-Ville"},4:{code:"MC",name:"Monte-Carlo"}},MN:{1:{code:"1",name:"Ulanbaatar"},2:{code:"035",name:"Orhon"},3:{code:"037",name:"Darhan uul"},4:{code:"039",name:"Hentiy"},5:{code:"041",name:"Hovsgol"},6:{code:"043",name:"Hovd"},7:{code:"046",name:"Uvs"},8:{code:"047",name:"Tov"},9:{code:"049",name:"Selenge"},10:{code:"051",name:"Suhbaatar"},11:{code:"053",name:"Omnogovi"},12:{code:"055",name:"Ovorhangay"},13:{code:"057",name:"Dzavhan"},14:{code:"059",name:"DundgovL"},15:{code:"061",name:"Dornod"},16:{code:"063",name:"Dornogov"},17:{code:"064",name:"Govi-Sumber"},18:{code:"065",name:"Govi-Altay"},19:{code:"067",name:"Bulgan"},20:{code:"069",name:"Bayanhongor"},21:{code:"071",name:"Bayan-Olgiy"},22:{code:"073",name:"Arhangay"}},MS:{1:{code:"A",name:"Saint Anthony"},2:{code:"G",name:"Saint Georges"},3:{code:"P",name:"Saint Peter"}},MA:{1:{code:"AGD",name:"Agadir"},2:{code:"HOC",name:"Al Hoceima"},3:{code:"AZI",name:"Azilal"},4:{code:"BME",name:"Beni Mellal"},5:{code:"BSL",name:"Ben Slimane"},6:{code:"BLM",name:"Boulemane"},7:{code:"CBL",name:"Casablanca"},8:{code:"CHA",name:"Chaouen"},9:{code:"EJA",name:"El Jadida"},10:{code:"EKS",name:"El Kelaa des Sraghna"},11:{code:"ERA",name:"Er Rachidia"},12:{code:"ESS",name:"Essaouira"},13:{code:"FES",name:"Fes"},14:{code:"FIG",name:"Figuig"},15:{code:"GLM",name:"Guelmim"},16:{code:"IFR",name:"Ifrane"},17:{code:"KEN",name:"Kenitra"},18:{code:"KHM",name:"Khemisset"},19:{code:"KHN",name:"Khenifra"},20:{code:"KHO",name:"Khouribga"},21:{code:"LYN",name:"Laayoune"},22:{code:"LAR",name:"Larache"},23:{code:"MRK",name:"Marrakech"},24:{code:"MKN",name:"Meknes"},25:{code:"NAD",name:"Nador"},26:{code:"ORZ",name:"Ouarzazate"},27:{code:"OUJ",name:"Oujda"},28:{code:"RSA",name:"Rabat-Sale"},29:{code:"SAF",name:"Safi"},30:{code:"SET",name:"Settat"},31:{code:"SKA",name:"Sidi Kacem"},32:{code:"TGR",name:"Tangier"},33:{code:"TAN",name:"Tan-Tan"},34:{code:"TAO",name:"Taounate"},35:{code:"TRD",name:"Taroudannt"},36:{code:"TAT",name:"Tata"},37:{code:"TAZ",name:"Taza"},38:{code:"TET",name:"Tetouan"},39:{code:"TIZ",name:"Tiznit"},40:{code:"ADK",name:"Ad Dakhla"},41:{code:"BJD",name:"Boujdour"},42:{code:"ESM",name:"Es Smara"}},MZ:{1:{code:"CD",name:"Cabo Delgado"},2:{code:"GZ",name:"Gaza"},3:{code:"IN",name:"Inhambane"},4:{code:"MN",name:"Manica"},5:{code:"MC",name:"Maputo (city)"},6:{code:"MP",name:"Maputo"},7:{code:"NA",name:"Nampula"},8:{code:"NI",name:"Niassa"},9:{code:"SO",name:"Sofala"},10:{code:"TE",name:"Tete"},11:{code:"ZA",name:"Zambezia"}},MM:{1:{code:"AY",name:"Ayeyarwady"},2:{code:"BG",name:"Bago"},3:{code:"MG",name:"Magway"},4:{code:"MD",name:"Mandalay"},5:{code:"SG",name:"Sagaing"},6:{code:"TN",name:"Tanintharyi"},7:{code:"YG",name:"Yangon"},8:{code:"CH",name:"Chin 
State"},9:{code:"KC",name:"Kachin State"},10:{code:"KH",name:"Kayah State"},11:{code:"KN",name:"Kayin State"},12:{code:"MN",name:"Mon State"},13:{code:"RK",name:"Rakhine State"},14:{code:"SH",name:"Shan State"}},NA:{1:{code:"CA",name:"Caprivi"},2:{code:"ER",name:"Erongo"},3:{code:"HA",name:"Hardap"},4:{code:"KR",name:"Karas"},5:{code:"KV",name:"Kavango"},6:{code:"KH",name:"Khomas"},7:{code:"KU",name:"Kunene"},8:{code:"OW",name:"Ohangwena"},9:{code:"OK",name:"Omaheke"},10:{code:"OT",name:"Omusati"},11:{code:"ON",name:"Oshana"},12:{code:"OO",name:"Oshikoto"},13:{code:"OJ",name:"Otjozondjupa"}},NR:{1:{code:"AO",name:"Aiwo"},2:{code:"AA",name:"Anabar"},3:{code:"AT",name:"Anetan"},4:{code:"AI",name:"Anibare"},5:{code:"BA",name:"Baiti"},6:{code:"BO",name:"Boe"},7:{code:"BU",name:"Buada"},8:{code:"DE",name:"Denigomodu"},9:{code:"EW",name:"Ewa"},10:{code:"IJ",name:"Ijuw"},11:{code:"ME",name:"Meneng"},12:{code:"NI",name:"Nibok"},13:{code:"UA",name:"Uaboe"},14:{code:"YA",name:"Yaren"}},NP:{1:{code:"BA",name:"Bagmati"},2:{code:"BH",name:"Bheri"},3:{code:"DH",name:"Dhawalagiri"},4:{code:"GA",name:"Gandaki"},5:{code:"JA",name:"Janakpur"},6:{code:"KA",name:"Karnali"},7:{code:"KO",name:"Kosi"},8:{code:"LU",name:"Lumbini"},9:{code:"MA",name:"Mahakali"},10:{code:"ME",name:"Mechi"},11:{code:"NA",name:"Narayani"},12:{code:"RA",name:"Rapti"},13:{code:"SA",name:"Sagarmatha"},14:{code:"SE",name:"Seti"}},NL:{1:{code:"DR",name:"Drenthe"},2:{code:"FL",name:"Flevoland"},3:{code:"FR",name:"Friesland"},4:{code:"GE",name:"Gelderland"},5:{code:"GR",name:"Groningen"},6:{code:"LI",name:"Limburg"},7:{code:"NB",name:"Noord Brabant"},8:{code:"NH",name:"Noord Holland"},9:{code:"OV",name:"Overijssel"},10:{code:"UT",name:"Utrecht"},11:{code:"ZE",name:"Zeeland"},12:{code:"ZH",name:"Zuid Holland"}},AN:{1:{code:"BON",name:"Bonaire"},2:{code:"CUR",name:"Curaçao"},3:{code:"SAB",name:"Saba"},4:{code:"SEU",name:"Sint Eustatius"},5:{code:"SMA",name:"Sint Maarten"}},NC:{1:{code:"L",name:"Iles Loyaute"},2:{code:"N",name:"Nord"},3:{code:"S",name:"Sud"}},NZ:{1:{code:"AUK",name:"Auckland"},2:{code:"BOP",name:"Bay of Plenty"},3:{code:"CAN",name:"Canterbury"},4:{code:"COR",name:"Coromandel"},5:{code:"GIS",name:"Gisborne"},6:{code:"FIO",name:"Fiordland"},7:{code:"HKB",name:"Hawke's Bay"},8:{code:"MBH",name:"Marlborough"},9:{code:"MWT",name:"Manawatu-Wanganui"},10:{code:"MCM",name:"Mt Cook-Mackenzie"},11:{code:"NSN",name:"Nelson"},12:{code:"NTL",name:"Northland"},13:{code:"OTA",name:"Otago"},14:{code:"STL",name:"Southland"},15:{code:"TKI",name:"Taranaki"},16:{code:"WGN",name:"Wellington"},17:{code:"WKO",name:"Waikato"},18:{code:"WAI",name:"Wairprarapa"},19:{code:"WTC",name:"West Coast"}},NI:{1:{code:"AN",name:"Atlantico Norte"},2:{code:"AS",name:"Atlantico Sur"},3:{code:"BO",name:"Boaco"},4:{code:"CA",name:"Carazo"},5:{code:"CI",name:"Chinandega"},6:{code:"CO",name:"Chontales"},7:{code:"ES",name:"Esteli"},8:{code:"GR",name:"Granada"},9:{code:"JI",name:"Jinotega"},10:{code:"LE",name:"Leon"},11:{code:"MD",name:"Madriz"},12:{code:"MN",name:"Managua"},13:{code:"MS",name:"Masaya"},14:{code:"MT",name:"Matagalpa"},15:{code:"NS",name:"Nuevo Segovia"},16:{code:"RS",name:"Rio San Juan"},17:{code:"RI",name:"Rivas"}},NE:{1:{code:"AG",name:"Agadez"},2:{code:"DF",name:"Diffa"},3:{code:"DS",name:"Dosso"},4:{code:"MA",name:"Maradi"},5:{code:"NM",name:"Niamey"},6:{code:"TH",name:"Tahoua"},7:{code:"TL",name:"Tillaberi"},8:{code:"ZD",name:"Zinder"}},NG:{1:{code:"AB",name:"Abia"},2:{code:"CT",name:"Abuja Federal Capital 
Territory"},3:{code:"AD",name:"Adamawa"},4:{code:"AK",name:"Akwa Ibom"},5:{code:"AN",name:"Anambra"},6:{code:"BC",name:"Bauchi"},7:{code:"BY",name:"Bayelsa"},8:{code:"BN",name:"Benue"},9:{code:"BO",name:"Borno"},10:{code:"CR",name:"Cross River"},11:{code:"DE",name:"Delta"},12:{code:"EB",name:"Ebonyi"},13:{code:"ED",name:"Edo"},14:{code:"EK",name:"Ekiti"},15:{code:"EN",name:"Enugu"},16:{code:"GO",name:"Gombe"},17:{code:"IM",name:"Imo"},18:{code:"JI",name:"Jigawa"},19:{code:"KD",name:"Kaduna"},20:{code:"KN",name:"Kano"},21:{code:"KT",name:"Katsina"},22:{code:"KE",name:"Kebbi"},23:{code:"KO",name:"Kogi"},24:{code:"KW",name:"Kwara"},25:{code:"LA",name:"Lagos"},26:{code:"NA",name:"Nassarawa"},27:{code:"NI",name:"Niger"},28:{code:"OG",name:"Ogun"},29:{code:"ONG",name:"Ondo"},30:{code:"OS",name:"Osun"},31:{code:"OY",name:"Oyo"},32:{code:"PL",name:"Plateau"},33:{code:"RI",name:"Rivers"},34:{code:"SO",name:"Sokoto"},35:{code:"TA",name:"Taraba"},36:{code:"YO",name:"Yobe"},37:{code:"ZA",name:"Zamfara"}},NU:{1:{code:"MAK",name:"Makefu"},2:{code:"TUA",name:"Tuapa"},3:{code:"NAM",name:"Namukulu"},4:{code:"HIK",name:"Hikutavake"},5:{code:"TOI",name:"Toi"},6:{code:"MUT",name:"Mutalau"},7:{code:"LAK",name:"Lakepa"},8:{code:"LIK",name:"Liku"},9:{code:"HAK",name:"Hakupu"},10:{code:"VAI",name:"Vaiea"},11:{code:"AVA",name:"Avatele"},12:{code:"TAM",name:"Tamakautoga"},13:{code:"ALO",name:"Alofi South"},14:{code:"ALO",name:"Alofi North"}},NF:{1:{code:"NOR",name:"Norfolk Island"}},MP:{1:{code:"N",name:"Northern Islands"},2:{code:"R",name:"Rota"},3:{code:"S",name:"Saipan"},4:{code:"T",name:"Tinian"}},NO:{1:{code:"AK",name:"Akershus"},2:{code:"AA",name:"Aust-Agder"},3:{code:"BU",name:"Buskerud"},4:{code:"FM",name:"Finnmark"},5:{code:"HM",name:"Hedmark"},6:{code:"HL",name:"Hordaland"},7:{code:"MR",name:"Møre og Romsdal"},8:{code:"NL",name:"Nordland"},9:{code:"NT",name:"Nord-Trøndelag"},10:{code:"OP",name:"Oppland"},11:{code:"OL",name:"Oslo"},12:{code:"RL",name:"Rogaland"},13:{code:"SJ",name:"Sogn og Fjordane"},14:{code:"ST",name:"Sør-Trøndelag"},15:{code:"SV",name:"Svalbard"},16:{code:"TM",name:"Telemark"},17:{code:"TR",name:"Troms"},18:{code:"VA",name:"Vest-Agder"},19:{code:"VF",name:"Vestfold"},20:{code:"OF",name:"Østfold"}},OM:{1:{code:"DA",name:"Ad Dakhiliyah"},2:{code:"BA",name:"Al Batinah"},3:{code:"WU",name:"Al Wusta"},4:{code:"SH",name:"Ash Sharqiyah"},5:{code:"ZA",name:"Az Zahirah"},6:{code:"MA",name:"Masqat"},7:{code:"MU",name:"Musandam"},8:{code:"ZU",name:"Zufar"}},PK:{1:{code:"B",name:"Balochistan"},2:{code:"T",name:"Federally Administered Tribal Ar"},3:{code:"I",name:"Islamabad Capital Territory"},4:{code:"N",name:"North-West Frontier"},5:{code:"P",name:"Punjab"},6:{code:"S",name:"Sindh"}},PW:{1:{code:"AM",name:"Aimeliik"},2:{code:"AR",name:"Airai"},3:{code:"AN",name:"Angaur"},4:{code:"HA",name:"Hatohobei"},5:{code:"KA",name:"Kayangel"},6:{code:"KO",name:"Koror"},7:{code:"ME",name:"Melekeok"},8:{code:"NA",name:"Ngaraard"},9:{code:"NG",name:"Ngarchelong"},10:{code:"ND",name:"Ngardmau"},11:{code:"NT",name:"Ngatpang"},12:{code:"NC",name:"Ngchesar"},13:{code:"NR",name:"Ngeremlengui"},14:{code:"NW",name:"Ngiwal"},15:{code:"PE",name:"Peleliu"},16:{code:"SO",name:"Sonsorol"}},PA:{1:{code:"BT",name:"Bocas del Toro"},2:{code:"CH",name:"Chiriqui"},3:{code:"CC",name:"Cocle"},4:{code:"CL",name:"Colon"},5:{code:"DA",name:"Darien"},6:{code:"HE",name:"Herrera"},7:{code:"LS",name:"Los Santos"},8:{code:"PA",name:"Panama"},9:{code:"SB",name:"San 
Blas"},10:{code:"VG",name:"Veraguas"}},PG:{1:{code:"BV",name:"Bougainville"},2:{code:"CE",name:"Central"},3:{code:"CH",name:"Chimbu"},4:{code:"EH",name:"Eastern Highlands"},5:{code:"EB",name:"East New Britain"},6:{code:"ES",name:"East Sepik"},7:{code:"EN",name:"Enga"},8:{code:"GU",name:"Gulf"},9:{code:"MD",name:"Madang"},10:{code:"MN",name:"Manus"},11:{code:"MB",name:"Milne Bay"},12:{code:"MR",name:"Morobe"},13:{code:"NC",name:"National Capital"},14:{code:"NI",name:"New Ireland"},15:{code:"NO",name:"Northern"},16:{code:"SA",name:"Sandaun"},17:{code:"SH",name:"Southern Highlands"},18:{code:"WE",name:"Western"},19:{code:"WH",name:"Western Highlands"},20:{code:"WB",name:"West New Britain"}},PY:{1:{code:"AG",name:"Alto Paraguay"},2:{code:"AN",name:"Alto Parana"},3:{code:"AM",name:"Amambay"},4:{code:"AS",name:"Asuncion"},5:{code:"BO",name:"Boqueron"},6:{code:"CG",name:"Caaguazu"},7:{code:"CZ",name:"Caazapa"},8:{code:"CN",name:"Canindeyu"},9:{code:"CE",name:"Central"},10:{code:"CC",name:"Concepcion"},11:{code:"CD",name:"Cordillera"},12:{code:"GU",name:"Guaira"},13:{code:"IT",name:"Itapua"},14:{code:"MI",name:"Misiones"},15:{code:"NE",name:"Neembucu"},16:{code:"PA",name:"Paraguari"},17:{code:"PH",name:"Presidente Hayes"},18:{code:"SP",name:"San Pedro"}},PE:{1:{code:"AM",name:"Amazonas"},2:{code:"AN",name:"Ancash"},3:{code:"AP",name:"Apurimac"},4:{code:"AR",name:"Arequipa"},5:{code:"AY",name:"Ayacucho"},6:{code:"CJ",name:"Cajamarca"},7:{code:"CL",name:"Callao"},8:{code:"CU",name:"Cusco"},9:{code:"HV",name:"Huancavelica"},10:{code:"HO",name:"Huanuco"},11:{code:"IC",name:"Ica"},12:{code:"JU",name:"Junin"},13:{code:"LD",name:"La Libertad"},14:{code:"LY",name:"Lambayeque"},15:{code:"LI",name:"Lima"},16:{code:"LO",name:"Loreto"},17:{code:"MD",name:"Madre de Dios"},18:{code:"MO",name:"Moquegua"},19:{code:"PA",name:"Pasco"},20:{code:"PI",name:"Piura"},21:{code:"PU",name:"Puno"},22:{code:"SM",name:"San Martin"},23:{code:"TA",name:"Tacna"},24:{code:"TU",name:"Tumbes"},25:{code:"UC",name:"Ucayali"}},PH:{1:{code:"ABR",name:"Abra"},2:{code:"ANO",name:"Agusan del Norte"},3:{code:"ASU",name:"Agusan del Sur"},4:{code:"AKL",name:"Aklan"},5:{code:"ALB",name:"Albay"},6:{code:"ANT",name:"Antique"},7:{code:"APY",name:"Apayao"},8:{code:"AUR",name:"Aurora"},9:{code:"BAS",name:"Basilan"},10:{code:"BTA",name:"Bataan"},11:{code:"BTE",name:"Batanes"},12:{code:"BTG",name:"Batangas"},13:{code:"BLR",name:"Biliran"},14:{code:"BEN",name:"Benguet"},15:{code:"BOL",name:"Bohol"},16:{code:"BUK",name:"Bukidnon"},17:{code:"BUL",name:"Bulacan"},18:{code:"CAG",name:"Cagayan"},19:{code:"CNO",name:"Camarines Norte"},20:{code:"CSU",name:"Camarines Sur"},21:{code:"CAM",name:"Camiguin"},22:{code:"CAP",name:"Capiz"},23:{code:"CAT",name:"Catanduanes"},24:{code:"CAV",name:"Cavite"},25:{code:"CEB",name:"Cebu"},26:{code:"CMP",name:"Compostela"},27:{code:"DNO",name:"Davao del Norte"},28:{code:"DSU",name:"Davao del Sur"},29:{code:"DOR",name:"Davao Oriental"},30:{code:"ESA",name:"Eastern Samar"},31:{code:"GUI",name:"Guimaras"},32:{code:"IFU",name:"Ifugao"},33:{code:"INO",name:"Ilocos Norte"},34:{code:"ISU",name:"Ilocos Sur"},35:{code:"ILO",name:"Iloilo"},36:{code:"ISA",name:"Isabela"},37:{code:"KAL",name:"Kalinga"},38:{code:"LAG",name:"Laguna"},39:{code:"LNO",name:"Lanao del Norte"},40:{code:"LSU",name:"Lanao del Sur"},41:{code:"UNI",name:"La Union"},42:{code:"LEY",name:"Leyte"},43:{code:"MAG",name:"Maguindanao"},44:{code:"MRN",name:"Marinduque"},45:{code:"MSB",name:"Masbate"},46:{code:"MIC",name:"Mindoro Occidental"},47:{code:"MIR",name:"Mindoro 
Oriental"},48:{code:"MSC",name:"Misamis Occidental"},49:{code:"MOR",name:"Misamis Oriental"},50:{code:"MOP",name:"Mountain"},51:{code:"NOC",name:"Negros Occidental"},52:{code:"NOR",name:"Negros Oriental"},53:{code:"NCT",name:"North Cotabato"},54:{code:"NSM",name:"Northern Samar"},55:{code:"NEC",name:"Nueva Ecija"},56:{code:"NVZ",name:"Nueva Vizcaya"},57:{code:"PLW",name:"Palawan"},58:{code:"PMP",name:"Pampanga"},59:{code:"PNG",name:"Pangasinan"},60:{code:"QZN",name:"Quezon"},61:{code:"QRN",name:"Quirino"},62:{code:"RIZ",name:"Rizal"},63:{code:"ROM",name:"Romblon"},64:{code:"SMR",name:"Samar"},65:{code:"SRG",name:"Sarangani"},66:{code:"SQJ",name:"Siquijor"},67:{code:"SRS",name:"Sorsogon"},68:{code:"SCO",name:"South Cotabato"},69:{code:"SLE",name:"Southern Leyte"},70:{code:"SKU",name:"Sultan Kudarat"},71:{code:"SLU",name:"Sulu"},72:{code:"SNO",name:"Surigao del Norte"},73:{code:"SSU",name:"Surigao del Sur"},74:{code:"TAR",name:"Tarlac"},75:{code:"TAW",name:"Tawi-Tawi"},76:{code:"ZBL",name:"Zambales"},77:{code:"ZNO",name:"Zamboanga del Norte"},78:{code:"ZSU",name:"Zamboanga del Sur"},79:{code:"ZSI",name:"Zamboanga Sibugay"}},PN:{1:{code:"PIT",name:"Pitcairn Island"}},PL:{1:{code:"DO",name:"Dolnośląskie"},2:{code:"KP",name:"Kujawsko-Pomorskie"},3:{code:"LL",name:"Lubelskie"},4:{code:"LU",name:"Lubuskie"},5:{code:"LO",name:"Łódzkie"},6:{code:"ML",name:"Małopolskie"},7:{code:"MZ",name:"Mazowieckie"},8:{code:"OP",name:"Opolskie"},9:{code:"PP",name:"Podkarpackie"},10:{code:"PL",name:"Podlaskie"},11:{code:"PM",name:"Pomorskie"},12:{code:"SL",name:"Śląskie"},13:{code:"SW",name:"Świętokrzyskie"},14:{code:"WM",name:"Warmińsko-Mazurskie"},15:{code:"WP",name:"Wielkopolskie"},16:{code:"ZA",name:"Zachodniopomorskie"}},PT:{1:{code:"AC",name:"Açores"},2:{code:"AV",name:"Aveiro"},3:{code:"BE",name:"Beja"},4:{code:"BR",name:"Braga"},5:{code:"BA",name:"Bragança"},6:{code:"CB",name:"Castelo Branco"},7:{code:"CO",name:"Coimbra"},8:{code:"EV",name:"évora"},9:{code:"FA",name:"Faro"},10:{code:"GU",name:"Guarda"},12:{code:"LE",name:"Leiria"},13:{code:"LI",name:"Lisboa"},14:{code:"ME",name:"Madeira"},15:{code:"PO",name:"Portalegre"},16:{code:"PR",name:"Porto"},17:{code:"SA",name:"Santarém"},18:{code:"SE",name:"SetÚbal"},19:{code:"VC",name:"Viana do Castelo"},20:{code:"VR",name:"Vila Real"},21:{code:"VI",name:"Viseu"}},PR:{1:{code:"A-A",name:"Añasco"},2:{code:"ADJ",name:"Adjuntas"},3:{code:"AGU",name:"Aguada"},4:{code:"AGU",name:"Aguadilla"},5:{code:"AGU",name:"Aguas Buenas"},6:{code:"AIB",name:"Aibonito"},7:{code:"ARE",name:"Arecibo"},8:{code:"ARR",name:"Arroyo"},9:{code:"BAR",name:"Barceloneta"},10:{code:"BAR",name:"Barranquitas"},11:{code:"BAY",name:"Bayamón"},12:{code:"CAB",name:"Cabo Rojo"},13:{code:"CAG",name:"Caguas"},14:{code:"CAM",name:"Camuy"},15:{code:"CAN",name:"Canóvanas"},16:{code:"CAR",name:"Carolina"},17:{code:"CAT",name:"Cataño"},18:{code:"CAY",name:"Cayey"},19:{code:"CEI",name:"Ceiba"},20:{code:"CIA",name:"Ciales"},21:{code:"CID",name:"Cidra"},22:{code:"COA",name:"Coamo"},23:{code:"COM",name:"Comerío"},24:{code:"COR",name:"Corozal"},25:{code:"CUL",name:"Culebra"},26:{code:"DOR",name:"Dorado"},27:{code:"FAJ",name:"Fajardo"},28:{code:"FLO",name:"Florida"},29:{code:"GUA",name:"Guayama"},30:{code:"GUA",name:"Guayanilla"},31:{code:"GUA",name:"Guaynabo"},32:{code:"GUR",name:"Gurabo"},33:{code:"GU¡",name:"Guánica"},34:{code:"HAT",name:"Hatillo"},35:{code:"HOR",name:"Hormigueros"},36:{code:"HUM",name:"Humacao"},37:{code:"ISA",name:"Isabela"},38:{code:"JAY",name:"Jayuya"},39:{code:"JUA",name:"Juana 
Díaz"},40:{code:"JUN",name:"Juncos"},41:{code:"LAJ",name:"Lajas"},42:{code:"LAR",name:"Lares"},43:{code:"LAS",name:"Las Marías"},44:{code:"LAS",name:"Las Piedras"},45:{code:"LOÕ",name:"Loíza"},46:{code:"LUQ",name:"Luquillo"},47:{code:"MAN",name:"Manatí"},48:{code:"MAR",name:"Maricao"},49:{code:"MAU",name:"Maunabo"},50:{code:"MAY",name:"Mayagüez"},51:{code:"MOC",name:"Moca"},52:{code:"MOR",name:"Morovis"},53:{code:"NAG",name:"Naguabo"},54:{code:"NAR",name:"Naranjito"},55:{code:"ORO",name:"Orocovis"},56:{code:"PAT",name:"Patillas"},57:{code:"PE-",name:"Peñuelas"},58:{code:"PON",name:"Ponce"},59:{code:"QUE",name:"Quebradillas"},60:{code:"RIN",name:"Rincón"},61:{code:"RIO",name:"Rio Grande"},62:{code:"SAB",name:"Sabana Grande"},63:{code:"SAL",name:"Salinas"},64:{code:"SAN",name:"San Germàn"},65:{code:"SAN",name:"San Juan"},66:{code:"SAN",name:"San Lorenzo"},67:{code:"SAN",name:"San Sebastiàn"},68:{code:"SAN",name:"Santa Isabel"},69:{code:"TOA",name:"Toa Alta"},70:{code:"TOA",name:"Toa Baja"},71:{code:"TRU",name:"Trujillo Alto"},72:{code:"UTU",name:"Utuado"},73:{code:"VEG",name:"Vega Alta"},74:{code:"VEG",name:"Vega Baja"},75:{code:"VIE",name:"Vieques"},76:{code:"VIL",name:"Villalba"},77:{code:"YAB",name:"Yabucoa"},78:{code:"YAU",name:"Yauco"}},QA:{1:{code:"DW",name:"Ad Dawhah"},2:{code:"GW",name:"Al Ghuwayriyah"},3:{code:"JM",name:"Al Jumayliyah"},4:{code:"KR",name:"Al Khawr"},5:{code:"WK",name:"Al Wakrah"},6:{code:"RN",name:"Ar Rayyan"},7:{code:"JB",name:"Jarayan al Batinah"},8:{code:"MS",name:"Madinat ash Shamal"},9:{code:"UD",name:"Umm Sa'id"},10:{code:"UL",name:"Umm Salal"}},RO:{1:{code:"AB",name:"Alba"},2:{code:"AR",name:"Arad"},3:{code:"AG",name:"Arges"},4:{code:"BC",name:"Bacau"},5:{code:"BH",name:"Bihor"},6:{code:"BN",name:"Bistrita-Nasaud"},7:{code:"BT",name:"Botosani"},8:{code:"BV",name:"Brasov"},9:{code:"BR",name:"Braila"},10:{code:"B",name:"Bucuresti"},11:{code:"BZ",name:"Buzau"},12:{code:"CS",name:"Caras-Severin"},13:{code:"CL",name:"Calarasi"},14:{code:"CJ",name:"Cluj"},15:{code:"CT",name:"Constanta"},16:{code:"CV",name:"Covasna"},17:{code:"DB",name:"Dimbovita"},18:{code:"DJ",name:"Dolj"},19:{code:"GL",name:"Galati"},20:{code:"GR",name:"Giurgiu"},21:{code:"GJ",name:"Gorj"},22:{code:"HR",name:"Harghita"},23:{code:"HD",name:"Hunedoara"},24:{code:"IL",name:"Ialomita"},25:{code:"IS",name:"Iasi"},26:{code:"IF",name:"Ilfov"},27:{code:"MM",name:"Maramures"},28:{code:"MH",name:"Mehedinti"},29:{code:"MS",name:"Mures"},30:{code:"NT",name:"Neamt"},31:{code:"OT",name:"Olt"},32:{code:"PH",name:"Prahova"},33:{code:"SM",name:"Satu-Mare"},34:{code:"SJ",name:"Salaj"},35:{code:"SB",name:"Sibiu"},36:{code:"SV",name:"Suceava"},37:{code:"TR",name:"Teleorman"},38:{code:"TM",name:"Timis"},39:{code:"TL",name:"Tulcea"},40:{code:"VS",name:"Vaslui"},41:{code:"VL",name:"Valcea"},42:{code:"VN",name:"Vrancea"}},RU:{1:{code:"AB",name:"Abakan"},2:{code:"AG",name:"Aginskoye"},3:{code:"AN",name:"Anadyr"},4:{code:"AR",name:"Arkahangelsk"},5:{code:"AS",name:"Astrakhan"},6:{code:"BA",name:"Barnaul"},7:{code:"BE",name:"Belgorod"},8:{code:"BI",name:"Birobidzhan"},9:{code:"BL",name:"Blagoveshchensk"},10:{code:"BR",name:"Bryansk"},11:{code:"CH",name:"Cheboksary"},12:{code:"CL",name:"Chelyabinsk"},13:{code:"CR",name:"Cherkessk"},14:{code:"CI",name:"Chita"},15:{code:"DU",name:"Dudinka"},16:{code:"EL",name:"Elista"},17:{code:"GO",name:"Gomo-Altaysk"},18:{code:"GA",name:"Gorno-Altaysk"},19:{code:"GR",name:"Groznyy"},20:{code:"IR",name:"Irkutsk"},21:{code:"IV",name:"Ivanovo"},22:{code:"IZ",name:"Izhevsk"},23:{code:"KA",name:"
Kalinigrad"},24:{code:"KL",name:"Kaluga"},25:{code:"KS",name:"Kasnodar"},26:{code:"KZ",name:"Kazan"},27:{code:"KE",name:"Kemerovo"},28:{code:"KH",name:"Khabarovsk"},29:{code:"KM",name:"Khanty-Mansiysk"},30:{code:"KO",name:"Kostroma"},31:{code:"KR",name:"Krasnodar"},32:{code:"KN",name:"Krasnoyarsk"},33:{code:"KU",name:"Kudymkar"},34:{code:"KG",name:"Kurgan"},35:{code:"KK",name:"Kursk"},36:{code:"KY",name:"Kyzyl"},37:{code:"LI",name:"Lipetsk"},38:{code:"MA",name:"Magadan"},39:{code:"MK",name:"Makhachkala"},40:{code:"MY",name:"Maykop"},41:{code:"MO",name:"Moscow"},42:{code:"MU",name:"Murmansk"},43:{code:"NA",name:"Nalchik"},44:{code:"NR",name:"Naryan Mar"},45:{code:"NZ",name:"Nazran"},46:{code:"NI",name:"Nizhniy Novgorod"},47:{code:"NO",name:"Novgorod"},48:{code:"NV",name:"Novosibirsk"},49:{code:"OM",name:"Omsk"},50:{code:"OR",name:"Orel"},51:{code:"OE",name:"Orenburg"},52:{code:"PA",name:"Palana"},53:{code:"PE",name:"Penza"},54:{code:"PR",name:"Perm"},55:{code:"PK",name:"Petropavlovsk-Kamchatskiy"},56:{code:"PT",name:"Petrozavodsk"},57:{code:"PS",name:"Pskov"},58:{code:"RO",name:"Rostov-na-Donu"},59:{code:"RY",name:"Ryazan"},60:{code:"SL",name:"Salekhard"},61:{code:"SA",name:"Samara"},62:{code:"SR",name:"Saransk"},63:{code:"SV",name:"Saratov"},64:{code:"SM",name:"Smolensk"},65:{code:"SP",name:"St. Petersburg"},66:{code:"ST",name:"Stavropol"},67:{code:"SY",name:"Syktyvkar"},68:{code:"TA",name:"Tambov"},69:{code:"TO",name:"Tomsk"},70:{code:"TU",name:"Tula"},71:{code:"TR",name:"Tura"},72:{code:"TV",name:"Tver"},73:{code:"TY",name:"Tyumen"},74:{code:"UF",name:"Ufa"},75:{code:"UL",name:"Ul'yanovsk"},76:{code:"UU",name:"Ulan-Ude"},77:{code:"US",name:"Ust'-Ordynskiy"},78:{code:"VL",name:"Vladikavkaz"},79:{code:"VA",name:"Vladimir"},80:{code:"VV",name:"Vladivostok"},81:{code:"VG",name:"Volgograd"},82:{code:"VD",name:"Vologda"},83:{code:"VO",name:"Voronezh"},84:{code:"VY",name:"Vyatka"},85:{code:"YA",name:"Yakutsk"},86:{code:"YR",name:"Yaroslavl"},87:{code:"YE",name:"Yekaterinburg"},88:{code:"YO",name:"Yoshkar-Ola"}},RW:{1:{code:"BU",name:"Butare"},2:{code:"BY",name:"Byumba"},3:{code:"CY",name:"Cyangugu"},4:{code:"GK",name:"Gikongoro"},5:{code:"GS",name:"Gisenyi"},6:{code:"GT",name:"Gitarama"},7:{code:"KG",name:"Kibungo"},8:{code:"KY",name:"Kibuye"},9:{code:"KR",name:"Kigali Rurale"},10:{code:"KV",name:"Kigali-ville"},11:{code:"RU",name:"Ruhengeri"},12:{code:"UM",name:"Umutara"}},KN:{1:{code:"CCN",name:"Christ Church Nichola Town"},2:{code:"SAS",name:"Saint Anne Sandy Point"},3:{code:"SGB",name:"Saint George Basseterre"},4:{code:"SGG",name:"Saint George Gingerland"},5:{code:"SJW",name:"Saint James Windward"},6:{code:"SJC",name:"Saint John Capesterre"},7:{code:"SJF",name:"Saint John Figtree"},8:{code:"SMC",name:"Saint Mary Cayon"},9:{code:"CAP",name:"Saint Paul Capesterre"},10:{code:"CHA",name:"Saint Paul Charlestown"},11:{code:"SPB",name:"Saint Peter Basseterre"},12:{code:"STL",name:"Saint Thomas Lowland"},13:{code:"STM",name:"Saint Thomas Middle Island"},14:{code:"TPP",name:"Trinity Palmetto Point"}},LC:{1:{code:"AR",name:"Anse-la-Raye"},2:{code:"CA",name:"Castries"},3:{code:"CH",name:"Choiseul"},4:{code:"DA",name:"Dauphin"},5:{code:"DE",name:"Dennery"},6:{code:"GI",name:"Gros-Islet"},7:{code:"LA",name:"Laborie"},8:{code:"MI",name:"Micoud"},9:{code:"PR",name:"Praslin"},10:{code:"SO",name:"Soufriere"},11:{code:"VF",name:"Vieux-Fort"}},VC:{1:{code:"C",name:"Charlotte"},2:{code:"R",name:"Grenadines"},3:{code:"A",name:"Saint Andrew"},4:{code:"D",name:"Saint David"},5:{code:"G",name:"Saint 
George"},6:{code:"P",name:"Saint Patrick"}},WS:{1:{code:"AN",name:"A'ana"},2:{code:"AI",name:"Aiga-i-le-Tai"},3:{code:"AT",name:"Atua"},4:{code:"FA",name:"Fa'asaleleaga"},5:{code:"GE",name:"Gaga'emauga"},6:{code:"GF",name:"Gagaifomauga"},7:{code:"PA",name:"Palauli"},8:{code:"SA",name:"Satupa'itea"},9:{code:"TU",name:"Tuamasaga"},10:{code:"VF",name:"Va'a-o-Fonoti"},11:{code:"VS",name:"Vaisigano"}},SM:{1:{code:"AC",name:"Acquaviva"},2:{code:"BM",name:"Borgo Maggiore"},3:{code:"CH",name:"Chiesanuova"},4:{code:"DO",name:"Domagnano"},5:{code:"FA",name:"Faetano"},6:{code:"FI",name:"Fiorentino"},7:{code:"MO",name:"Montegiardino"},8:{code:"SM",name:"Citta di San Marino"},9:{code:"SE",name:"Serravalle"}},ST:{1:{code:"S",name:"Sao Tome"},2:{code:"P",name:"Principe"}},SA:{1:{code:"BH",name:"Al Bahah"},2:{code:"HS",name:"Al Hudud ash Shamaliyah"},3:{code:"JF",name:"Al Jawf"},4:{code:"MD",name:"Al Madinah"},5:{code:"QS",name:"Al Qasim"},6:{code:"RD",name:"Ar Riyad"},7:{code:"AQ",name:"Ash Sharqiyah (Eastern)"},8:{code:"AS",name:"'Asir"},9:{code:"HL",name:"Ha'il"},10:{code:"JZ",name:"Jizan"},11:{code:"ML",name:"Makkah"},12:{code:"NR",name:"Najran"},13:{code:"TB",name:"Tabuk"}},SN:{1:{code:"DA",name:"Dakar"},2:{code:"DI",name:"Diourbel"},3:{code:"FA",name:"Fatick"},4:{code:"KA",name:"Kaolack"},5:{code:"KO",name:"Kolda"},6:{code:"LO",name:"Louga"},7:{code:"MA",name:"Matam"},8:{code:"SL",name:"Saint-Louis"},9:{code:"TA",name:"Tambacounda"},10:{code:"TH",name:"Thies"},11:{code:"ZI",name:"Ziguinchor"}},SC:{1:{code:"AP",name:"Anse aux Pins"},2:{code:"AB",name:"Anse Boileau"},3:{code:"AE",name:"Anse Etoile"},4:{code:"AL",name:"Anse Louis"},5:{code:"AR",name:"Anse Royale"},6:{code:"BL",name:"Baie Lazare"},7:{code:"BS",name:"Baie Sainte Anne"},8:{code:"BV",name:"Beau Vallon"},9:{code:"BA",name:"Bel Air"},10:{code:"BO",name:"Bel Ombre"},11:{code:"CA",name:"Cascade"},12:{code:"GL",name:"Glacis"},13:{code:"GM",name:"Grand' Anse (on Mahe)"},14:{code:"GP",name:"Grand' Anse (on Praslin)"},15:{code:"DG",name:"La Digue"},16:{code:"RA",name:"La Riviere Anglaise"},17:{code:"MB",name:"Mont Buxton"},18:{code:"MF",name:"Mont Fleuri"},19:{code:"PL",name:"Plaisance"},20:{code:"PR",name:"Pointe La Rue"},21:{code:"PG",name:"Port Glaud"},22:{code:"SL",name:"Saint Louis"},23:{code:"TA",name:"Takamaka"}},SL:{1:{code:"E",name:"Eastern"},2:{code:"N",name:"Northern"},3:{code:"S",name:"Southern"},4:{code:"W",name:"Western"}},SK:{1:{code:"BA",name:"Banskobystricky"},2:{code:"BR",name:"Bratislavsky"},3:{code:"KO",name:"Kosicky"},4:{code:"NI",name:"Nitriansky"},5:{code:"PR",name:"Presovsky"},6:{code:"TC",name:"Trenciansky"},7:{code:"TV",name:"Trnavsky"},8:{code:"ZI",name:"Zilinsky"}},SI:{1:{code:"4",name:"Štajerska"},2:{code:"2A",name:"Gorenjska"},3:{code:"5",name:"Prekmurje"},4:{code:"3",name:"Koroška"},5:{code:"2B",name:"Notranjska"},6:{code:"1",name:"Primorska"},7:{code:"2C",name:"Dolenjska"},8:{code:"2C",name:"Bela Krajina"}},SB:{1:{code:"CE",name:"Central"},2:{code:"CH",name:"Choiseul"},3:{code:"GC",name:"Guadalcanal"},4:{code:"HO",name:"Honiara"},5:{code:"IS",name:"Isabel"},6:{code:"MK",name:"Makira"},7:{code:"ML",name:"Malaita"},8:{code:"RB",name:"Rennell and Bellona"},9:{code:"TM",name:"Temotu"},10:{code:"WE",name:"Western"}},SO:{1:{code:"AW",name:"Awdal"},2:{code:"BK",name:"Bakool"},3:{code:"BN",name:"Banaadir"},4:{code:"BR",name:"Bari"},5:{code:"BY",name:"Bay"},6:{code:"GA",name:"Galguduud"},7:{code:"GE",name:"Gedo"},8:{code:"HI",name:"Hiiraan"},9:{code:"JD",name:"Jubbada Dhexe"},10:{code:"JH",name:"Jubbada 
Hoose"},11:{code:"MU",name:"Mudug"},12:{code:"NU",name:"Nugaal"},13:{code:"SA",name:"Sanaag"},14:{code:"SD",name:"Shabeellaha Dhexe"},15:{code:"SH",name:"Shabeellaha Hoose"},16:{code:"SL",name:"Sool"},17:{code:"TO",name:"Togdheer"},18:{code:"WG",name:"Woqooyi Galbeed"}},ZA:{1:{code:"EC",name:"Eastern Cape"},2:{code:"FS",name:"Free State"},3:{code:"GT",name:"Gauteng"},4:{code:"KN",name:"KwaZulu-Natal"},5:{code:"LP",name:"Limpopo"},6:{code:"MP",name:"Mpumalanga"},7:{code:"NW",name:"North West"},8:{code:"NC",name:"Northern Cape"},9:{code:"WC",name:"Western Cape"}},ES:{1:{code:"CA",name:"La Coruña"},2:{code:"AL",name:"Álava"},3:{code:"AB",name:"Albacete"},4:{code:"AC",name:"Alicante"},5:{code:"AM",name:"Almeria"},6:{code:"AS",name:"Asturias"},7:{code:"AV",name:"Ávila"},8:{code:"BJ",name:"Badajoz"},9:{code:"IB",name:"Baleares"},10:{code:"BA",name:"Barcelona"},11:{code:"BU",name:"Burgos"},12:{code:"CC",name:"Cáceres"},13:{code:"CZ",name:"Cádiz"},14:{code:"CT",name:"Cantabria"},15:{code:"CL",name:"Castellón"},16:{code:"CE",name:"Ceuta"},17:{code:"CR",name:"Ciudad Real"},18:{code:"CD",name:"Córdoba"},19:{code:"CU",name:"Cuenca"},20:{code:"GI",name:"Gerona"},21:{code:"GD",name:"Granada"},22:{code:"GJ",name:"Guadalajara"},23:{code:"GP",name:"Guipúzcoa"},24:{code:"HL",name:"Huelva"},25:{code:"HS",name:"Huesca"},26:{code:"JN",name:"Jaén"},27:{code:"RJ",name:"La Rioja"},28:{code:"PM",name:"Las Palmas"},29:{code:"LE",name:"León"},30:{code:"LL",name:"Lérida"},31:{code:"LG",name:"Lugo"},32:{code:"MD",name:"Madrid"},33:{code:"MA",name:"Málaga"},34:{code:"ML",name:"Melilla"},35:{code:"MU",name:"Murcia"},36:{code:"NV",name:"Navarra"},37:{code:"OU",name:"Ourense"},38:{code:"PL",name:"Palencia"},39:{code:"PO",name:"Pontevedra"},40:{code:"SL",name:"Salamanca"},41:{code:"SC",name:"Santa Cruz de Tenerife"},42:{code:"SG",name:"Segovia"},43:{code:"SV",name:"Sevilla"},44:{code:"SO",name:"Soria"},45:{code:"TA",name:"Tarragona"},46:{code:"TE",name:"Teruel"},47:{code:"TO",name:"Toledo"},48:{code:"VC",name:"Valencia"},49:{code:"VD",name:"Valladolid"},50:{code:"VZ",name:"Vizcaya"},51:{code:"ZM",name:"Zamora"},52:{code:"ZR",name:"Zaragoza"}},LK:{1:{code:"CE",name:"Central"},2:{code:"EA",name:"Eastern"},3:{code:"NC",name:"North Central"},4:{code:"NO",name:"Northern"},5:{code:"NW",name:"North Western"},6:{code:"SA",name:"Sabaragamuwa"},7:{code:"SO",name:"Southern"},8:{code:"UV",name:"Uva"},9:{code:"WE",name:"Western"}},SH:{1:{code:"A",name:"Ascension"},2:{code:"S",name:"Saint Helena"},3:{code:"T",name:"Tristan da Cunha"}},PM:{1:{code:"P",name:"Saint Pierre"},2:{code:"M",name:"Miquelon"}},SD:{1:{code:"ANL",name:"A'ali an Nil"},2:{code:"BAM",name:"Al Bahr al Ahmar"},3:{code:"BRT",name:"Al Buhayrat"},4:{code:"JZR",name:"Al Jazirah"},5:{code:"KRT",name:"Al Khartum"},6:{code:"QDR",name:"Al Qadarif"},7:{code:"WDH",name:"Al Wahdah"},8:{code:"ANB",name:"An Nil al Abyad"},9:{code:"ANZ",name:"An Nil al Azraq"},10:{code:"ASH",name:"Ash Shamaliyah"},11:{code:"BJA",name:"Bahr al Jabal"},12:{code:"GIS",name:"Gharb al Istiwa'iyah"},13:{code:"GBG",name:"Gharb Bahr al Ghazal"},14:{code:"GDA",name:"Gharb Darfur"},15:{code:"GKU",name:"Gharb Kurdufan"},16:{code:"JDA",name:"Janub Darfur"},17:{code:"JKU",name:"Janub Kurdufan"},18:{code:"JQL",name:"Junqali"},19:{code:"KSL",name:"Kassala"},20:{code:"NNL",name:"Nahr an Nil"},21:{code:"SBG",name:"Shamal Bahr al Ghazal"},22:{code:"SDA",name:"Shamal Darfur"},23:{code:"SKU",name:"Shamal Kurdufan"},24:{code:"SIS",name:"Sharq al 
Istiwa'iyah"},25:{code:"SNR",name:"Sinnar"},26:{code:"WRB",name:"Warab"}},SR:{1:{code:"BR",name:"Brokopondo"},2:{code:"CM",name:"Commewijne"},3:{code:"CR",name:"Coronie"},4:{code:"MA",name:"Marowijne"},5:{code:"NI",name:"Nickerie"},6:{code:"PA",name:"Para"},7:{code:"PM",name:"Paramaribo"},9:{code:"SA",name:"Saramacca"},10:{code:"SI",name:"Sipaliwini"},11:{code:"WA",name:"Wanica"}},SZ:{1:{code:"H",name:"Hhohho"},2:{code:"L",name:"Lubombo"},3:{code:"M",name:"Manzini"},4:{code:"S",name:"Shishelweni"}},SE:{1:{code:"K",name:"Blekinge"},2:{code:"W",name:"Dalama"},3:{code:"I",name:"Gotland"},4:{code:"X",name:"Gävleborg"},5:{code:"N",name:"Halland"},6:{code:"Z",name:"Jämtland"},7:{code:"F",name:"Jönköping"},8:{code:"H",name:"Kalmar"},9:{code:"G",name:"Kronoberg"},10:{code:"BD",name:"Norrbotten"},11:{code:"M",name:"Skåne"},12:{code:"AB",name:"Stockholm"},13:{code:"D",name:"Södermanland"},14:{code:"C",name:"Uppsala"},15:{code:"S",name:"Värmland"},16:{code:"AC",name:"Västerbotten"},17:{code:"Y",name:"Västernorrland"},18:{code:"U",name:"Västmanland"},19:{code:"O",name:"Västra Götaland"},20:{code:"T",name:"Örebro"},21:{code:"E",name:"Östergötland"}},CH:{1:{code:"AG",name:"Aargau"},2:{code:"AR",name:"Appenzell Ausserrhoden"},3:{code:"AI",name:"Appenzell Innerrhoden"},4:{code:"BS",name:"Basel-Stadt"},5:{code:"BL",name:"Basel-Landschaft"},6:{code:"BE",name:"Bern"},7:{code:"FR",name:"Fribourg"},8:{code:"GE",name:"Genève"},9:{code:"GL",name:"Glarus"},10:{code:"GR",name:"Graubünden"},11:{code:"JU",name:"Jura"},12:{code:"LU",name:"Lucerne"},13:{code:"NE",name:"Neuchâtel"},14:{code:"NW",name:"Nidwalden"},15:{code:"OW",name:"Obwalden"},16:{code:"SG",name:"St. Gallen"},17:{code:"SH",name:"Schaffhausen"},18:{code:"SZ",name:"Schwyz"},19:{code:"SO",name:"Solothurn"},20:{code:"TG",name:"Thurgau"},21:{code:"TI",name:"Ticino"},22:{code:"UR",name:"Uri"},23:{code:"VS",name:"Valais"},24:{code:"VD",name:"Vaud"},25:{code:"ZG",name:"Zug"},26:{code:"ZH",name:"Zürich"}},SY:{1:{code:"HA",name:"Al Hasakah"},2:{code:"LA",name:"Al Ladhiqiyah"},3:{code:"QU",name:"Al Qunaytirah"},4:{code:"RQ",name:"Ar Raqqah"},5:{code:"SU",name:"As Suwayda"},6:{code:"DA",name:"Dara"},7:{code:"DZ",name:"Dayr az Zawr"},8:{code:"DI",name:"Dimashq"},9:{code:"HL",name:"Halab"},10:{code:"HM",name:"Hamah"},11:{code:"HI",name:"Hims"},12:{code:"ID",name:"Idlib"},13:{code:"RD",name:"Rif Dimashq"},14:{code:"TA",name:"Tartus"}},TW:{1:{code:"CH",name:"Chang-hua"},2:{code:"CI",name:"Chia-i"},3:{code:"HS",name:"Hsin-chu"},4:{code:"HL",name:"Hua-lien"},5:{code:"IL",name:"I-lan"},6:{code:"KH",name:"Kao-hsiung county"},7:{code:"KM",name:"Kin-men"},8:{code:"LC",name:"Lien-chiang"},9:{code:"ML",name:"Miao-li"},10:{code:"NT",name:"Nan-t'ou"},11:{code:"PH",name:"P'eng-hu"},12:{code:"PT",name:"P'ing-tung"},13:{code:"TG",name:"T'ai-chung"},14:{code:"TA",name:"T'ai-nan"},15:{code:"TP",name:"T'ai-pei county"},16:{code:"TT",name:"T'ai-tung"},17:{code:"TY",name:"T'ao-yuan"},18:{code:"YL",name:"Yun-lin"},19:{code:"CC",name:"Chia-i city"},20:{code:"CL",name:"Chi-lung"},21:{code:"HC",name:"Hsin-chu"},22:{code:"TH",name:"T'ai-chung"},23:{code:"TN",name:"T'ai-nan"},24:{code:"KC",name:"Kao-hsiung city"},25:{code:"TC",name:"T'ai-pei city"}},TJ:{1:{code:"GB",name:"Gorno-Badakhstan"},2:{code:"KT",name:"Khatlon"},3:{code:"SU",name:"Sughd"}},TZ:{1:{code:"AR",name:"Arusha"},2:{code:"DS",name:"Dar es 
Salaam"},3:{code:"DO",name:"Dodoma"},4:{code:"IR",name:"Iringa"},5:{code:"KA",name:"Kagera"},6:{code:"KI",name:"Kigoma"},7:{code:"KJ",name:"Kilimanjaro"},8:{code:"LN",name:"Lindi"},9:{code:"MY",name:"Manyara"},10:{code:"MR",name:"Mara"},11:{code:"MB",name:"Mbeya"},12:{code:"MO",name:"Morogoro"},13:{code:"MT",name:"Mtwara"},14:{code:"MW",name:"Mwanza"},15:{code:"PN",name:"Pemba North"},16:{code:"PS",name:"Pemba South"},17:{code:"PW",name:"Pwani"},18:{code:"RK",name:"Rukwa"},19:{code:"RV",name:"Ruvuma"},20:{code:"SH",name:"Shinyanga"},21:{code:"SI",name:"Singida"},22:{code:"TB",name:"Tabora"},23:{code:"TN",name:"Tanga"},24:{code:"ZC",name:"Zanzibar Central/South"},25:{code:"ZN",name:"Zanzibar North"},26:{code:"ZU",name:"Zanzibar Urban/West"}},TH:{1:{code:"Amnat Charoen",name:"Amnat Charoen"},2:{code:"Ang Thong",name:"Ang Thong"},3:{code:"Ayutthaya",name:"Ayutthaya"},4:{code:"Bangkok",name:"Bangkok"},5:{code:"Buriram",name:"Buriram"},6:{code:"Chachoengsao",name:"Chachoengsao"},7:{code:"Chai Nat",name:"Chai Nat"},8:{code:"Chaiyaphum",name:"Chaiyaphum"},9:{code:"Chanthaburi",name:"Chanthaburi"},10:{code:"Chiang Mai",name:"Chiang Mai"},11:{code:"Chiang Rai",name:"Chiang Rai"},12:{code:"Chon Buri",name:"Chon Buri"},13:{code:"Chumphon",name:"Chumphon"},14:{code:"Kalasin",name:"Kalasin"},15:{code:"Kamphaeng Phet",name:"Kamphaeng Phet"},16:{code:"Kanchanaburi",name:"Kanchanaburi"},17:{code:"Khon Kaen",name:"Khon Kaen"},18:{code:"Krabi",name:"Krabi"},19:{code:"Lampang",name:"Lampang"},20:{code:"Lamphun",name:"Lamphun"},21:{code:"Loei",name:"Loei"},22:{code:"Lop Buri",name:"Lop Buri"},23:{code:"Mae Hong Son",name:"Mae Hong Son"},24:{code:"Maha Sarakham",name:"Maha Sarakham"},25:{code:"Mukdahan",name:"Mukdahan"},26:{code:"Nakhon Nayok",name:"Nakhon Nayok"},27:{code:"Nakhon Pathom",name:"Nakhon Pathom"},28:{code:"Nakhon Phanom",name:"Nakhon Phanom"},29:{code:"Nakhon Ratchasima",name:"Nakhon Ratchasima"},30:{code:"Nakhon Sawan",name:"Nakhon Sawan"},31:{code:"Nakhon Si Thammarat",name:"Nakhon Si Thammarat"},32:{code:"Nan",name:"Nan"},33:{code:"Narathiwat",name:"Narathiwat"},34:{code:"Nong Bua Lamphu",name:"Nong Bua Lamphu"},35:{code:"Nong Khai",name:"Nong Khai"},36:{code:"Nonthaburi",name:"Nonthaburi"},37:{code:"Pathum Thani",name:"Pathum Thani"},38:{code:"Pattani",name:"Pattani"},39:{code:"Phangnga",name:"Phangnga"},40:{code:"Phatthalung",name:"Phatthalung"},41:{code:"Phayao",name:"Phayao"},42:{code:"Phetchabun",name:"Phetchabun"},43:{code:"Phetchaburi",name:"Phetchaburi"},44:{code:"Phichit",name:"Phichit"},45:{code:"Phitsanulok",name:"Phitsanulok"},46:{code:"Phrae",name:"Phrae"},47:{code:"Phuket",name:"Phuket"},48:{code:"Prachin Buri",name:"Prachin Buri"},49:{code:"Prachuap Khiri Khan",name:"Prachuap Khiri Khan"},50:{code:"Ranong",name:"Ranong"},51:{code:"Ratchaburi",name:"Ratchaburi"},52:{code:"Rayong",name:"Rayong"},53:{code:"Roi Et",name:"Roi Et"},54:{code:"Sa Kaeo",name:"Sa Kaeo"},55:{code:"Sakon Nakhon",name:"Sakon Nakhon"},56:{code:"Samut Prakan",name:"Samut Prakan"},57:{code:"Samut Sakhon",name:"Samut Sakhon"},58:{code:"Samut Songkhram",name:"Samut Songkhram"},59:{code:"Sara Buri",name:"Sara Buri"},60:{code:"Satun",name:"Satun"},61:{code:"Sing Buri",name:"Sing Buri"},62:{code:"Sisaket",name:"Sisaket"},63:{code:"Songkhla",name:"Songkhla"},64:{code:"Sukhothai",name:"Sukhothai"},65:{code:"Suphan Buri",name:"Suphan Buri"},66:{code:"Surat Thani",name:"Surat Thani"},67:{code:"Surin",name:"Surin"},68:{code:"Tak",name:"Tak"},69:{code:"Trang",name:"Trang"},70:{code:"Trat",name:"Trat"},71:{code:"Ubon 
Ratchathani",name:"Ubon Ratchathani"},72:{code:"Udon Thani",name:"Udon Thani"},73:{code:"Uthai Thani",name:"Uthai Thani"},74:{code:"Uttaradit",name:"Uttaradit"},75:{code:"Yala",name:"Yala"},76:{code:"Yasothon",name:"Yasothon"}},TG:{1:{code:"K",name:"Kara"},2:{code:"P",name:"Plateaux"},3:{code:"S",name:"Savanes"},4:{code:"C",name:"Centrale"},5:{code:"M",name:"Maritime"}},TK:{1:{code:"A",name:"Atafu"},2:{code:"F",name:"Fakaofo"},3:{code:"N",name:"Nukunonu"}},TO:{1:{code:"H",name:"Ha'apai"},2:{code:"T",name:"Tongatapu"},3:{code:"V",name:"Vava'u"}},TT:{1:{code:"CT",name:"Couva/Tabaquite/Talparo"},2:{code:"DM",name:"Diego Martin"},3:{code:"MR",name:"Mayaro/Rio Claro"},4:{code:"PD",name:"Penal/Debe"},5:{code:"PT",name:"Princes Town"},6:{code:"SG",name:"Sangre Grande"},7:{code:"SL",name:"San Juan/Laventille"},8:{code:"SI",name:"Siparia"},9:{code:"TP",name:"Tunapuna/Piarco"},10:{code:"PS",name:"Port of Spain"},11:{code:"SF",name:"San Fernando"},12:{code:"AR",name:"Arima"},13:{code:"PF",name:"Point Fortin"},14:{code:"CH",name:"Chaguanas"},15:{code:"TO",name:"Tobago"}},TN:{1:{code:"AR",name:"Ariana"},2:{code:"BJ",name:"Beja"},3:{code:"BA",name:"Ben Arous"},4:{code:"BI",name:"Bizerte"},5:{code:"GB",name:"Gabes"},6:{code:"GF",name:"Gafsa"},7:{code:"JE",name:"Jendouba"},8:{code:"KR",name:"Kairouan"},9:{code:"KS",name:"Kasserine"},10:{code:"KB",name:"Kebili"},11:{code:"KF",name:"Kef"},12:{code:"MH",name:"Mahdia"},13:{code:"MN",name:"Manouba"},14:{code:"ME",name:"Medenine"},15:{code:"MO",name:"Monastir"},16:{code:"NA",name:"Nabeul"},17:{code:"SF",name:"Sfax"},18:{code:"SD",name:"Sidi"},19:{code:"SL",name:"Siliana"},20:{code:"SO",name:"Sousse"},21:{code:"TA",name:"Tataouine"},22:{code:"TO",name:"Tozeur"},23:{code:"TU",name:"Tunis"},24:{code:"ZA",name:"Zaghouan"}},TR:{1:{code:"ADA",name:"Adana"},2:{code:"ADI",name:"Adiyaman"},3:{code:"AFY",name:"Afyonkarahisar"},4:{code:"AGR",name:"Agri"},5:{code:"AKS",name:"Aksaray"},6:{code:"AMA",name:"Amasya"},7:{code:"ANK",name:"Ankara"},8:{code:"ANT",name:"Antalya"},9:{code:"ARD",name:"Ardahan"},10:{code:"ART",name:"Artvin"},11:{code:"AYI",name:"Aydin"},12:{code:"BAL",name:"Balikesir"},13:{code:"BAR",name:"Bartin"},14:{code:"BAT",name:"Batman"},15:{code:"BAY",name:"Bayburt"},16:{code:"BIL",name:"Bilecik"},17:{code:"BIN",name:"Bingol"},18:{code:"BIT",name:"Bitlis"},19:{code:"BOL",name:"Bolu"},20:{code:"BRD",name:"Burdur"},21:{code:"BRS",name:"Bursa"},22:{code:"CKL",name:"Canakkale"},23:{code:"CKR",name:"Cankiri"},24:{code:"COR",name:"Corum"},25:{code:"DEN",name:"Denizli"},26:{code:"DIY",name:"Diyarbakir"},27:{code:"DUZ",name:"Duzce"},28:{code:"EDI",name:"Edirne"},29:{code:"ELA",name:"Elazig"},30:{code:"EZC",name:"Erzincan"},31:{code:"EZR",name:"Erzurum"},32:{code:"ESK",name:"Eskisehir"},33:{code:"GAZ",name:"Gaziantep"},34:{code:"GIR",name:"Giresun"},35:{code:"GMS",name:"Gumushane"},36:{code:"HKR",name:"Hakkari"},37:{code:"HTY",name:"Hatay"},38:{code:"IGD",name:"Igdir"},39:{code:"ISP",name:"Isparta"},40:{code:"IST",name:"Istanbul"},41:{code:"IZM",name:"Izmir"},42:{code:"KAH",name:"Kahramanmaras"},43:{code:"KRB",name:"Karabuk"},44:{code:"KRM",name:"Karaman"},45:{code:"KRS",name:"Kars"},46:{code:"KAS",name:"Kastamonu"},47:{code:"KAY",name:"Kayseri"},48:{code:"KLS",name:"Kilis"},49:{code:"KRK",name:"Kirikkale"},50:{code:"KLR",name:"Kirklareli"},51:{code:"KRH",name:"Kirsehir"},52:{code:"KOC",name:"Kocaeli"},53:{code:"KON",name:"Konya"},54:{code:"KUT",name:"Kutahya"},55:{code:"MAL",name:"Malatya"},56:{code:"MAN",name:"Manisa"},57:{code:"MAR",name:"Mardin"},58:{code:"MER",name
:"Mersin"},59:{code:"MUG",name:"Mugla"},60:{code:"MUS",name:"Mus"},61:{code:"NEV",name:"Nevsehir"},62:{code:"NIG",name:"Nigde"},63:{code:"ORD",name:"Ordu"},64:{code:"OSM",name:"Osmaniye"},65:{code:"RIZ",name:"Rize"},66:{code:"SAK",name:"Sakarya"},67:{code:"SAM",name:"Samsun"},68:{code:"SAN",name:"Sanliurfa"},69:{code:"SII",name:"Siirt"},70:{code:"SIN",name:"Sinop"},71:{code:"SIR",name:"Sirnak"},72:{code:"SIV",name:"Sivas"},73:{code:"TEL",name:"Tekirdag"},74:{code:"TOK",name:"Tokat"},75:{code:"TRA",name:"Trabzon"},76:{code:"TUN",name:"Tunceli"},77:{code:"USK",name:"Usak"},78:{code:"VAN",name:"Van"},79:{code:"YAL",name:"Yalova"},80:{code:"YOZ",name:"Yozgat"},81:{code:"ZON",name:"Zonguldak"}},TM:{1:{code:"A",name:"Ahal Welayaty"},2:{code:"B",name:"Balkan Welayaty"},3:{code:"D",name:"Dashhowuz Welayaty"},4:{code:"L",name:"Lebap Welayaty"},5:{code:"M",name:"Mary Welayaty"}},TC:{1:{code:"AC",name:"Ambergris Cays"},2:{code:"DC",name:"Dellis Cay"},3:{code:"FC",name:"French Cay"},4:{code:"LW",name:"Little Water Cay"},5:{code:"RC",name:"Parrot Cay"},6:{code:"PN",name:"Pine Cay"},7:{code:"SL",name:"Salt Cay"},8:{code:"GT",name:"Grand Turk"},9:{code:"SC",name:"South Caicos"},10:{code:"EC",name:"East Caicos"},11:{code:"MC",name:"Middle Caicos"},12:{code:"NC",name:"North Caicos"},13:{code:"PR",name:"Providenciales"},14:{code:"WC",name:"West Caicos"}},TV:{1:{code:"NMG",name:"Nanumanga"},2:{code:"NLK",name:"Niulakita"},3:{code:"NTO",name:"Niutao"},4:{code:"FUN",name:"Funafuti"},5:{code:"NME",name:"Nanumea"},6:{code:"NUI",name:"Nui"},7:{code:"NFT",name:"Nukufetau"},8:{code:"NLL",name:"Nukulaelae"},9:{code:"VAI",name:"Vaitupu"}},UG:{1:{code:"KAL",name:"Kalangala"},2:{code:"KMP",name:"Kampala"},3:{code:"KAY",name:"Kayunga"},4:{code:"KIB",name:"Kiboga"},5:{code:"LUW",name:"Luwero"},6:{code:"MAS",name:"Masaka"},7:{code:"MPI",name:"Mpigi"},8:{code:"MUB",name:"Mubende"},9:{code:"MUK",name:"Mukono"},10:{code:"NKS",name:"Nakasongola"},11:{code:"RAK",name:"Rakai"},12:{code:"SEM",name:"Sembabule"},13:{code:"WAK",name:"Wakiso"},14:{code:"BUG",name:"Bugiri"},15:{code:"BUS",name:"Busia"},16:{code:"IGA",name:"Iganga"},17:{code:"JIN",name:"Jinja"},18:{code:"KAB",name:"Kaberamaido"},19:{code:"KML",name:"Kamuli"},20:{code:"KPC",name:"Kapchorwa"},21:{code:"KTK",name:"Katakwi"},22:{code:"KUM",name:"Kumi"},23:{code:"MAY",name:"Mayuge"},24:{code:"MBA",name:"Mbale"},25:{code:"PAL",name:"Pallisa"},26:{code:"SIR",name:"Sironko"},27:{code:"SOR",name:"Soroti"},28:{code:"TOR",name:"Tororo"},29:{code:"ADJ",name:"Adjumani"},30:{code:"APC",name:"Apac"},31:{code:"ARU",name:"Arua"},32:{code:"GUL",name:"Gulu"},33:{code:"KIT",name:"Kitgum"},34:{code:"KOT",name:"Kotido"},35:{code:"LIR",name:"Lira"},36:{code:"MRT",name:"Moroto"},37:{code:"MOY",name:"Moyo"},38:{code:"NAK",name:"Nakapiripirit"},39:{code:"NEB",name:"Nebbi"},40:{code:"PAD",name:"Pader"},41:{code:"YUM",name:"Yumbe"},42:{code:"BUN",name:"Bundibugyo"},43:{code:"BSH",name:"Bushenyi"},44:{code:"HOI",name:"Hoima"},45:{code:"KBL",name:"Kabale"},46:{code:"KAR",name:"Kabarole"},47:{code:"KAM",name:"Kamwenge"},48:{code:"KAN",name:"Kanungu"},49:{code:"KAS",name:"Kasese"},50:{code:"KBA",name:"Kibaale"},51:{code:"KIS",name:"Kisoro"},52:{code:"KYE",name:"Kyenjojo"},53:{code:"MSN",name:"Masindi"},54:{code:"MBR",name:"Mbarara"},55:{code:"NTU",name:"Ntungamo"},56:{code:"RUK",name:"Rukungiri"}},UA:{1:{code:"CK",name:"Cherkasy"},2:{code:"CH",name:"Chernihiv"},3:{code:"CV",name:"Chernivtsi"},4:{code:"CR",name:"Crimea"},5:{code:"DN",name:"Dnipropetrovs'k"},6:{code:"DO",name:"Donets'k"},7:{code:"IV",n
ame:"Ivano-Frankivs'k"},8:{code:"KL",name:"Kharkiv Kherson"},9:{code:"KM",name:"Khmel'nyts'kyy"},10:{code:"KR",name:"Kirovohrad"},11:{code:"KV",name:"Kiev"},12:{code:"KY",name:"Kyyiv"},13:{code:"LU",name:"Luhans'k"},14:{code:"LV",name:"L'viv"},15:{code:"MY",name:"Mykolayiv"},16:{code:"OD",name:"Odesa"},17:{code:"PO",name:"Poltava"},18:{code:"RI",name:"Rivne"},19:{code:"SE",name:"Sevastopol"},20:{code:"SU",name:"Sumy"},21:{code:"TE",name:"Ternopil'"},22:{code:"VI",name:"Vinnytsya"},23:{code:"VO",name:"Volyn'"},24:{code:"ZK",name:"Zakarpattya"},25:{code:"ZA",name:"Zaporizhzhya"},26:{code:"ZH",name:"Zhytomyr"}},AE:{1:{code:"AZ",name:"Abu Zaby"},2:{code:"AJ",name:"'Ajman"},3:{code:"FU",name:"Al Fujayrah"},4:{code:"SH",name:"Ash Shariqah"},5:{code:"DU",name:"Dubayy"},6:{code:"RK",name:"R'as al Khaymah"},7:{code:"UQ",name:"Umm al Qaywayn"}},GB:{1:{code:"ABN",name:"Aberdeen"},2:{code:"ABNS",name:"Aberdeenshire"},3:{code:"ANG",name:"Anglesey"},4:{code:"AGS",name:"Angus"},5:{code:"ARY",name:"Argyll and Bute"},6:{code:"BEDS",name:"Bedfordshire"},7:{code:"BERKS",name:"Berkshire"},8:{code:"BLA",name:"Blaenau Gwent"},9:{code:"BRI",name:"Bridgend"},10:{code:"BSTL",name:"Bristol"},11:{code:"BUCKS",name:"Buckinghamshire"},12:{code:"CAE",name:"Caerphilly"},13:{code:"CAMBS",name:"Cambridgeshire"},14:{code:"CDF",name:"Cardiff"},15:{code:"CARM",name:"Carmarthenshire"},16:{code:"CDGN",name:"Ceredigion"},17:{code:"CHES",name:"Cheshire"},18:{code:"CLACK",name:"Clackmannanshire"},19:{code:"CON",name:"Conwy"},20:{code:"CORN",name:"Cornwall"},21:{code:"DNBG",name:"Denbighshire"},22:{code:"DERBY",name:"Derbyshire"},23:{code:"DVN",name:"Devon"},24:{code:"DOR",name:"Dorset"},25:{code:"DGL",name:"Dumfries and Galloway"},26:{code:"DUND",name:"Dundee"},27:{code:"DHM",name:"Durham"},28:{code:"ARYE",name:"East Ayrshire"},29:{code:"DUNBE",name:"East Dunbartonshire"},30:{code:"LOTE",name:"East Lothian"},31:{code:"RENE",name:"East Renfrewshire"},32:{code:"ERYS",name:"East Riding of Yorkshire"},33:{code:"SXE",name:"East Sussex"},34:{code:"EDIN",name:"Edinburgh"},35:{code:"ESX",name:"Essex"},36:{code:"FALK",name:"Falkirk"},37:{code:"FFE",name:"Fife"},38:{code:"FLINT",name:"Flintshire"},39:{code:"GLAS",name:"Glasgow"},40:{code:"GLOS",name:"Gloucestershire"},41:{code:"LDN",name:"Greater London"},42:{code:"MCH",name:"Greater Manchester"},43:{code:"GDD",name:"Gwynedd"},44:{code:"HANTS",name:"Hampshire"},45:{code:"HWR",name:"Herefordshire"},46:{code:"HERTS",name:"Hertfordshire"},47:{code:"HLD",name:"Highlands"},48:{code:"IVER",name:"Inverclyde"},49:{code:"IOW",name:"Isle of Wight"},50:{code:"KNT",name:"Kent"},51:{code:"LANCS",name:"Lancashire"},52:{code:"LEICS",name:"Leicestershire"},53:{code:"LINCS",name:"Lincolnshire"},54:{code:"MSY",name:"Merseyside"},55:{code:"MERT",name:"Merthyr Tydfil"},56:{code:"MLOT",name:"Midlothian"},57:{code:"MMOUTH",name:"Monmouthshire"},58:{code:"MORAY",name:"Moray"},59:{code:"NPRTAL",name:"Neath Port Talbot"},60:{code:"NEWPT",name:"Newport"},61:{code:"NOR",name:"Norfolk"},62:{code:"ARYN",name:"North Ayrshire"},63:{code:"LANN",name:"North Lanarkshire"},64:{code:"YSN",name:"North Yorkshire"},65:{code:"NHM",name:"Northamptonshire"},66:{code:"NLD",name:"Northumberland"},67:{code:"NOT",name:"Nottinghamshire"},68:{code:"ORK",name:"Orkney Islands"},69:{code:"OFE",name:"Oxfordshire"},70:{code:"PEM",name:"Pembrokeshire"},71:{code:"PERTH",name:"Perth and Kinross"},72:{code:"PWS",name:"Powys"},73:{code:"REN",name:"Renfrewshire"},74:{code:"RHON",name:"Rhondda Cynon 
Taff"},75:{code:"RUT",name:"Rutland"},76:{code:"BOR",name:"Scottish Borders"},77:{code:"SHET",name:"Shetland Islands"},78:{code:"SPE",name:"Shropshire"},79:{code:"SOM",name:"Somerset"},80:{code:"ARYS",name:"South Ayrshire"},81:{code:"LANS",name:"South Lanarkshire"},82:{code:"YSS",name:"South Yorkshire"},83:{code:"SFD",name:"Staffordshire"},84:{code:"STIR",name:"Stirling"},85:{code:"SFK",name:"Suffolk"},86:{code:"SRY",name:"Surrey"},87:{code:"SWAN",name:"Swansea"},88:{code:"TORF",name:"Torfaen"},89:{code:"TWR",name:"Tyne and Wear"},90:{code:"VGLAM",name:"Vale of Glamorgan"},91:{code:"WARKS",name:"Warwickshire"},92:{code:"WDUN",name:"West Dunbartonshire"},93:{code:"WLOT",name:"West Lothian"},94:{code:"WMD",name:"West Midlands"},95:{code:"SXW",name:"West Sussex"},96:{code:"YSW",name:"West Yorkshire"},97:{code:"WIL",name:"Western Isles"},98:{code:"WLT",name:"Wiltshire"},99:{code:"WORCS",name:"Worcestershire"},100:{code:"WRX",name:"Wrexham"}},US:{1:{code:"AL",name:"Alabama"},2:{code:"AK",name:"Alaska"},3:{code:"AS",name:"American Samoa"},4:{code:"AZ",name:"Arizona"},5:{code:"AR",name:"Arkansas"},6:{code:"AF",name:"Armed Forces Africa"},7:{code:"AA",name:"Armed Forces Americas"},8:{code:"AC",name:"Armed Forces Canada"},9:{code:"AE",name:"Armed Forces Europe"},10:{code:"AM",name:"Armed Forces Middle East"},11:{code:"AP",name:"Armed Forces Pacific"},12:{code:"CA",name:"California"},13:{code:"CO",name:"Colorado"},14:{code:"CT",name:"Connecticut"},15:{code:"DE",name:"Delaware"},16:{code:"DC",name:"District of Columbia"},17:{code:"FM",name:"Federated States Of Micronesia"},18:{code:"FL",name:"Florida"},19:{code:"GA",name:"Georgia"},20:{code:"GU",name:"Guam"},21:{code:"HI",name:"Hawaii"},22:{code:"ID",name:"Idaho"},23:{code:"IL",name:"Illinois"},24:{code:"IN",name:"Indiana"},25:{code:"IA",name:"Iowa"},26:{code:"KS",name:"Kansas"},27:{code:"KY",name:"Kentucky"},28:{code:"LA",name:"Louisiana"},29:{code:"ME",name:"Maine"},30:{code:"MH",name:"Marshall Islands"},31:{code:"MD",name:"Maryland"},32:{code:"MA",name:"Massachusetts"},33:{code:"MI",name:"Michigan"},34:{code:"MN",name:"Minnesota"},35:{code:"MS",name:"Mississippi"},36:{code:"MO",name:"Missouri"},37:{code:"MT",name:"Montana"},38:{code:"NE",name:"Nebraska"},39:{code:"NV",name:"Nevada"},40:{code:"NH",name:"New Hampshire"},41:{code:"NJ",name:"New Jersey"},42:{code:"NM",name:"New Mexico"},43:{code:"NY",name:"New York"},44:{code:"NC",name:"North Carolina"},45:{code:"ND",name:"North Dakota"},46:{code:"MP",name:"Northern Mariana Islands"},47:{code:"OH",name:"Ohio"},48:{code:"OK",name:"Oklahoma"},49:{code:"OR",name:"Oregon"},50:{code:"PW",name:"Palau"},51:{code:"PA",name:"Pennsylvania"},52:{code:"PR",name:"Puerto Rico"},53:{code:"RI",name:"Rhode Island"},54:{code:"SC",name:"South Carolina"},55:{code:"SD",name:"South Dakota"},56:{code:"TN",name:"Tennessee"},57:{code:"TX",name:"Texas"},58:{code:"UT",name:"Utah"},59:{code:"VT",name:"Vermont"},60:{code:"VI",name:"Virgin Islands"},61:{code:"VA",name:"Virginia"},62:{code:"WA",name:"Washington"},63:{code:"WV",name:"West Virginia"},64:{code:"WI",name:"Wisconsin"},65:{code:"WY",name:"Wyoming"}},UM:{1:{code:"BI",name:"Baker Island"},2:{code:"HI",name:"Howland Island"},3:{code:"JI",name:"Jarvis Island"},4:{code:"JA",name:"Johnston Atoll"},5:{code:"KR",name:"Kingman Reef"},6:{code:"MA",name:"Midway Atoll"},7:{code:"NI",name:"Navassa Island"},8:{code:"PA",name:"Palmyra Atoll"},9:{code:"WI",name:"Wake Island"}},UY:{1:{code:"AR",name:"Artigas"},2:{code:"CA",name:"Canelones"},3:{code:"CL",name:"Cerro 
Largo"},4:{code:"CO",name:"Colonia"},5:{code:"DU",name:"Durazno"},6:{code:"FS",name:"Flores"},7:{code:"FA",name:"Florida"},8:{code:"LA",name:"Lavalleja"},9:{code:"MA",name:"Maldonado"},10:{code:"MO",name:"Montevideo"},11:{code:"PA",name:"Paysandu"},12:{code:"RN",name:"Rio Negro"},13:{code:"RV",name:"Rivera"},14:{code:"RO",name:"Rocha"},15:{code:"SL",name:"Salto"},16:{code:"SJ",name:"San Jose"},17:{code:"SO",name:"Soriano"},18:{code:"TA",name:"Tacuarembo"},19:{code:"TT",name:"Treinta y Tres"}},UZ:{1:{code:"AN",name:"Andijon"},2:{code:"BU",name:"Buxoro"},3:{code:"FA",name:"Farg'ona"},4:{code:"JI",name:"Jizzax"},5:{code:"NG",name:"Namangan"},6:{code:"NW",name:"Navoiy"},7:{code:"QA",name:"Qashqadaryo"},8:{code:"QR",name:"Qoraqalpog'iston Republikasi"},9:{code:"SA",name:"Samarqand"},10:{code:"SI",name:"Sirdaryo"},11:{code:"SU",name:"Surxondaryo"},12:{code:"TK",name:"Toshkent City"},13:{code:"TO",name:"Toshkent Region"},14:{code:"XO",name:"Xorazm"}},VU:{1:{code:"MA",name:"Malampa"},2:{code:"PE",name:"Penama"},3:{code:"SA",name:"Sanma"},4:{code:"SH",name:"Shefa"},5:{code:"TA",name:"Tafea"},6:{code:"TO",name:"Torba"}},VE:{1:{code:"AM",name:"Amazonas"},2:{code:"AN",name:"Anzoategui"},3:{code:"AP",name:"Apure"},4:{code:"AR",name:"Aragua"},5:{code:"BA",name:"Barinas"},6:{code:"BO",name:"Bolivar"},7:{code:"CA",name:"Carabobo"},8:{code:"CO",name:"Cojedes"},9:{code:"DA",name:"Delta Amacuro"},10:{code:"DF",name:"Dependencias Federales"},11:{code:"DI",name:"Distrito Federal"},12:{code:"FA",name:"Falcon"},13:{code:"GU",name:"Guarico"},14:{code:"LA",name:"Lara"},15:{code:"ME",name:"Merida"},16:{code:"MI",name:"Miranda"},17:{code:"MO",name:"Monagas"},18:{code:"NE",name:"Nueva Esparta"},19:{code:"PO",name:"Portuguesa"},20:{code:"SU",name:"Sucre"},21:{code:"TA",name:"Tachira"},22:{code:"TR",name:"Trujillo"},23:{code:"VA",name:"Vargas"},24:{code:"YA",name:"Yaracuy"},25:{code:"ZU",name:"Zulia"}},VN:{1:{code:"AG",name:"An Giang"},2:{code:"BG",name:"Bac Giang"},3:{code:"BK",name:"Bac Kan"},4:{code:"BL",name:"Bac Lieu"},5:{code:"BC",name:"Bac Ninh"},6:{code:"BR",name:"Ba Ria-Vung Tau"},7:{code:"BN",name:"Ben Tre"},8:{code:"BH",name:"Binh Dinh"},9:{code:"BU",name:"Binh Duong"},10:{code:"BP",name:"Binh Phuoc"},11:{code:"BT",name:"Binh Thuan"},12:{code:"CM",name:"Ca Mau"},13:{code:"CT",name:"Can Tho"},14:{code:"CB",name:"Cao Bang"},15:{code:"DL",name:"Dak Lak"},16:{code:"DG",name:"Dak Nong"},17:{code:"DN",name:"Da Nang"},18:{code:"DB",name:"Dien Bien"},19:{code:"DI",name:"Dong Nai"},20:{code:"DT",name:"Dong Thap"},21:{code:"GL",name:"Gia Lai"},22:{code:"HG",name:"Ha Giang"},23:{code:"HD",name:"Hai Duong"},24:{code:"HP",name:"Hai Phong"},25:{code:"HM",name:"Ha Nam"},26:{code:"HI",name:"Ha Noi"},27:{code:"HT",name:"Ha Tay"},28:{code:"HH",name:"Ha Tinh"},29:{code:"HB",name:"Hoa Binh"},30:{code:"HC",name:"Ho Chin Minh"},31:{code:"HU",name:"Hau Giang"},32:{code:"HY",name:"Hung Yen"}},VI:{1:{code:"C",name:"Saint Croix"},2:{code:"J",name:"Saint John"},3:{code:"T",name:"Saint Thomas"}},WF:{1:{code:"A",name:"Alo"},2:{code:"S",name:"Sigave"},3:{code:"W",name:"Wallis"}},YE:{1:{code:"AB",name:"Abyan"},2:{code:"AD",name:"Adan"},3:{code:"AM",name:"Amran"},4:{code:"BA",name:"Al Bayda"},5:{code:"DA",name:"Ad Dali"},6:{code:"DH",name:"Dhamar"},7:{code:"HD",name:"Hadramawt"},8:{code:"HJ",name:"Hajjah"},9:{code:"HU",name:"Al Hudaydah"},10:{code:"IB",name:"Ibb"},11:{code:"JA",name:"Al Jawf"},12:{code:"LA",name:"Lahij"},13:{code:"MA",name:"Ma'rib"},14:{code:"MR",name:"Al Mahrah"},15:{code:"MW",name:"Al 
Mahwit"},16:{code:"SD",name:"Sa'dah"},17:{code:"SN",name:"San'a"},18:{code:"SH",name:"Shabwah"},19:{code:"TA",name:"Ta'izz"}},YU:{1:{code:"KOS",name:"Kosovo"},2:{code:"MON",name:"Montenegro"},3:{code:"SER",name:"Serbia"},4:{code:"VOJ",name:"Vojvodina"}},ZR:{1:{code:"BC",name:"Bas-Congo"},2:{code:"BN",name:"Bandundu"},3:{code:"EQ",name:"Equateur"},4:{code:"KA",name:"Katanga"},5:{code:"KE",name:"Kasai-Oriental"},6:{code:"KN",name:"Kinshasa"},7:{code:"KW",name:"Kasai-Occidental"},8:{code:"MA",name:"Maniema"},9:{code:"NK",name:"Nord-Kivu"},10:{code:"OR",name:"Orientale"},11:{code:"SK",name:"Sud-Kivu"}},ZM:{1:{code:"CE",name:"Central"},2:{code:"CB",name:"Copperbelt"},3:{code:"EA",name:"Eastern"},4:{code:"LP",name:"Luapula"},5:{code:"LK",name:"Lusaka"},6:{code:"NO",name:"Northern"},7:{code:"NW",name:"North-Western"},8:{code:"SO",name:"Southern"},9:{code:"WE",name:"Western"}},ZW:{1:{code:"BU",name:"Bulawayo"},2:{code:"HA",name:"Harare"},3:{code:"ML",name:"Manicaland"},4:{code:"MC",name:"Mashonaland Central"},5:{code:"ME",name:"Mashonaland East"},6:{code:"MW",name:"Mashonaland West"},7:{code:"MV",name:"Masvingo"},8:{code:"MN",name:"Matabeleland North"},9:{code:"MS",name:"Matabeleland South"},10:{code:"MD",name:"Midlands"}}},BFHTimePickerDelimiter=":",BFHTimePickerModes={am:"AM",pm:"PM"},BFHTimezonesList={AF:{"Asia/Kabul":"Kabul"},AL:{"Europe/Tirane":"Tirane"},DZ:{"Africa/Algiers":"Algiers"},AS:{"Pacific/Pago_Pago":"Pago Pago"},AD:{"Europe/Andorra":"Andorra"},AO:{"Africa/Luanda":"Luanda"},AI:{"America/Anguilla":"Anguilla"},AQ:{"Antarctica/Casey":"Casey","Antarctica/Davis":"Davis","Antarctica/DumontDUrville":"DumontDUrville","Antarctica/Macquarie":"Macquarie","Antarctica/Mawson":"Mawson","Antarctica/McMurdo":"McMurdo","Antarctica/Palmer":"Palmer","Antarctica/Rothera":"Rothera","Antarctica/South_Pole":"South Pole","Antarctica/Syowa":"Syowa","Antarctica/Vostok":"Vostok"},AG:{"America/Antigua":"Antigua"},AR:{"America/Argentina/Buenos_Aires":"Argentina / Buenos Aires","America/Argentina/Catamarca":"Argentina / Catamarca","America/Argentina/Cordoba":"Argentina / Cordoba","America/Argentina/Jujuy":"Argentina / Jujuy","America/Argentina/La_Rioja":"Argentina / La Rioja","America/Argentina/Mendoza":"Argentina / Mendoza","America/Argentina/Rio_Gallegos":"Argentina / Rio Gallegos","America/Argentina/Salta":"Argentina / Salta","America/Argentina/San_Juan":"Argentina / San Juan","America/Argentina/San_Luis":"Argentina / San Luis","America/Argentina/Tucuman":"Argentina / Tucuman","America/Argentina/Ushuaia":"Argentina / Ushuaia"},AM:{"Asia/Yerevan":"Yerevan"},AW:{"America/Aruba":"Aruba"},AU:{"Australia/Adelaide":"Adelaide","Australia/Brisbane":"Brisbane","Australia/Broken_Hill":"Broken Hill","Australia/Currie":"Currie","Australia/Darwin":"Darwin","Australia/Eucla":"Eucla","Australia/Hobart":"Hobart","Australia/Lindeman":"Lindeman","Australia/Lord_Howe":"Lord Howe","Australia/Melbourne":"Melbourne","Australia/Perth":"Perth","Australia/Sydney":"Sydney"},AT:{"Europe/Vienna":"Vienna"},AZ:{"Asia/Baku":"Baku"},BH:{"Asia/Bahrain":"Bahrain"},BD:{"Asia/Dhaka":"Dhaka"},BB:{"America/Barbados":"Barbados"},BY:{"Europe/Minsk":"Minsk"},BE:{"Europe/Brussels":"Brussels"},BZ:{"America/Belize":"Belize"},BJ:{"Africa/Porto-Novo":"Porto-Novo"},BM:{"Atlantic/Bermuda":"Bermuda"},BT:{"Asia/Thimphu":"Thimphu"},BO:{"America/La_Paz":"La Paz"},BA:{"Europe/Sarajevo":"Sarajevo"},BW:{"Africa/Gaborone":"Gaborone"},BR:{"America/Araguaina":"Araguaina","America/Bahia":"Bahia","America/Belem":"Belem","America/Boa_Vista":"Boa 
Vista","America/Campo_Grande":"Campo Grande","America/Cuiaba":"Cuiaba","America/Eirunepe":"Eirunepe","America/Fortaleza":"Fortaleza","America/Maceio":"Maceio","America/Manaus":"Manaus","America/Noronha":"Noronha","America/Porto_Velho":"Porto Velho","America/Recife":"Recife","America/Rio_Branco":"Rio Branco","America/Santarem":"Santarem","America/Sao_Paulo":"Sao Paulo"},VG:{"America/Tortola":"Tortola"},BN:{"Asia/Brunei":"Brunei"},BG:{"Europe/Sofia":"Sofia"},BF:{"Africa/Ouagadougou":"Ouagadougou"},BI:{"Africa/Bujumbura":"Bujumbura"},CI:{"Africa/Abidjan":"Abidjan"},KH:{"Asia/Phnom_Penh":"Phnom Penh"},CM:{"Africa/Douala":"Douala"},CA:{"America/Atikokan":"Atikokan","America/Blanc-Sablon":"Blanc-Sablon","America/Cambridge_Bay":"Cambridge Bay","America/Creston":"Creston","America/Dawson":"Dawson","America/Dawson_Creek":"Dawson Creek","America/Edmonton":"Edmonton","America/Glace_Bay":"Glace Bay","America/Goose_Bay":"Goose Bay","America/Halifax":"Halifax","America/Inuvik":"Inuvik","America/Iqaluit":"Iqaluit","America/Moncton":"Moncton","America/Montreal":"Montreal","America/Nipigon":"Nipigon","America/Pangnirtung":"Pangnirtung","America/Rainy_River":"Rainy River","America/Rankin_Inlet":"Rankin Inlet","America/Regina":"Regina","America/Resolute":"Resolute","America/St_Johns":"St Johns","America/Swift_Current":"Swift Current","America/Thunder_Bay":"Thunder Bay","America/Toronto":"Toronto","America/Vancouver":"Vancouver","America/Whitehorse":"Whitehorse","America/Winnipeg":"Winnipeg","America/Yellowknife":"Yellowknife"},CV:{"Atlantic/Cape_Verde":"Cape Verde"},KY:{"America/Cayman":"Cayman"},CF:{"Africa/Bangui":"Bangui"},TD:{"Africa/Ndjamena":"Ndjamena"},CL:{"America/Santiago":"Santiago","Pacific/Easter":"Easter"},CN:{"Asia/Chongqing":"Chongqing","Asia/Harbin":"Harbin","Asia/Kashgar":"Kashgar","Asia/Shanghai":"Shanghai","Asia/Urumqi":"Urumqi"},CO:{"America/Bogota":"Bogota"},KM:{"Indian/Comoro":"Comoro"},CG:{"Africa/Brazzaville":"Brazzaville"},CR:{"America/Costa_Rica":"Costa Rica"},HR:{"Europe/Zagreb":"Zagreb"},CU:{"America/Havana":"Havana"},CY:{"Asia/Nicosia":"Nicosia"},CZ:{"Europe/Prague":"Prague"},CD:{"Africa/Kinshasa":"Kinshasa","Africa/Lubumbashi":"Lubumbashi"},DK:{"Europe/Copenhagen":"Copenhagen"},DJ:{"Africa/Djibouti":"Djibouti"},DM:{"America/Dominica":"Dominica"},DO:{"America/Santo_Domingo":"Santo Domingo"},TP:{},EC:{"America/Guayaquil":"Guayaquil","Pacific/Galapagos":"Galapagos"},EG:{"Africa/Cairo":"Cairo"},SV:{"America/El_Salvador":"El Salvador"},GQ:{"Africa/Malabo":"Malabo"},ER:{"Africa/Asmara":"Asmara"},EE:{"Europe/Tallinn":"Tallinn"},ET:{"Africa/Addis_Ababa":"Addis Ababa"},FO:{"Atlantic/Faroe":"Faroe"},FK:{"Atlantic/Stanley":"Stanley"},FJ:{"Pacific/Fiji":"Fiji"},FI:{"Europe/Helsinki":"Helsinki"},MK:{"Europe/Skopje":"Skopje"},FR:{"Europe/Paris":"Paris"},GA:{"Africa/Libreville":"Libreville"},GE:{"Asia/Tbilisi":"Tbilisi"},DE:{"Europe/Berlin":"Berlin"},GH:{"Africa/Accra":"Accra"},GR:{"Europe/Athens":"Athens"},GL:{"America/Danmarkshavn":"Danmarkshavn","America/Godthab":"Godthab","America/Scoresbysund":"Scoresbysund","America/Thule":"Thule"},GD:{"America/Grenada":"Grenada"},GU:{"Pacific/Guam":"Guam"},GT:{"America/Guatemala":"Guatemala"},GN:{"Africa/Conakry":"Conakry"},GW:{"Africa/Bissau":"Bissau"},GY:{"America/Guyana":"Guyana"},HT:{"America/Port-au-Prince":"Port-au-Prince"},HN:{"America/Tegucigalpa":"Tegucigalpa"},HK:{"Asia/Hong_Kong":"Hong 
Kong"},HU:{"Europe/Budapest":"Budapest"},IS:{"Atlantic/Reykjavik":"Reykjavik"},IN:{"Asia/Kolkata":"Kolkata"},ID:{"Asia/Jakarta":"Jakarta","Asia/Jayapura":"Jayapura","Asia/Makassar":"Makassar","Asia/Pontianak":"Pontianak"},IR:{"Asia/Tehran":"Tehran"},IQ:{"Asia/Baghdad":"Baghdad"},IE:{"Europe/Dublin":"Dublin"},IL:{"Asia/Jerusalem":"Jerusalem"},IT:{"Europe/Rome":"Rome"},JM:{"America/Jamaica":"Jamaica"},JP:{"Asia/Tokyo":"Tokyo"},JO:{"Asia/Amman":"Amman"},KZ:{"Asia/Almaty":"Almaty","Asia/Aqtau":"Aqtau","Asia/Aqtobe":"Aqtobe","Asia/Oral":"Oral","Asia/Qyzylorda":"Qyzylorda"},KE:{"Africa/Nairobi":"Nairobi"},KI:{"Pacific/Enderbury":"Enderbury","Pacific/Kiritimati":"Kiritimati","Pacific/Tarawa":"Tarawa"},KW:{"Asia/Kuwait":"Kuwait"},KG:{"Asia/Bishkek":"Bishkek"},LA:{"Asia/Vientiane":"Vientiane"},LV:{"Europe/Riga":"Riga"},LB:{"Asia/Beirut":"Beirut"},LS:{"Africa/Maseru":"Maseru"},LR:{"Africa/Monrovia":"Monrovia"},LY:{"Africa/Tripoli":"Tripoli"},LI:{"Europe/Vaduz":"Vaduz"},LT:{"Europe/Vilnius":"Vilnius"},LU:{"Europe/Luxembourg":"Luxembourg"},MO:{"Asia/Macau":"Macau"},MG:{"Indian/Antananarivo":"Antananarivo"},MW:{"Africa/Blantyre":"Blantyre"},MY:{"Asia/Kuala_Lumpur":"Kuala Lumpur","Asia/Kuching":"Kuching"},MV:{"Indian/Maldives":"Maldives"},ML:{"Africa/Bamako":"Bamako"},MT:{"Europe/Malta":"Malta"},MH:{"Pacific/Kwajalein":"Kwajalein","Pacific/Majuro":"Majuro"},MR:{"Africa/Nouakchott":"Nouakchott"},MU:{"Indian/Mauritius":"Mauritius"},MX:{"America/Bahia_Banderas":"Bahia Banderas","America/Cancun":"Cancun","America/Chihuahua":"Chihuahua","America/Hermosillo":"Hermosillo","America/Matamoros":"Matamoros","America/Mazatlan":"Mazatlan","America/Merida":"Merida","America/Mexico_City":"Mexico City","America/Monterrey":"Monterrey","America/Ojinaga":"Ojinaga","America/Santa_Isabel":"Santa Isabel","America/Tijuana":"Tijuana"},FM:{"Pacific/Chuuk":"Chuuk","Pacific/Kosrae":"Kosrae","Pacific/Pohnpei":"Pohnpei"},MD:{"Europe/Chisinau":"Chisinau"},MC:{"Europe/Monaco":"Monaco"},MN:{"Asia/Choibalsan":"Choibalsan","Asia/Hovd":"Hovd","Asia/Ulaanbaatar":"Ulaanbaatar"},ME:{"Europe/Podgorica":"Podgorica"},MS:{"America/Montserrat":"Montserrat"},MA:{"Africa/Casablanca":"Casablanca"},MZ:{"Africa/Maputo":"Maputo"},MM:{"Asia/Rangoon":"Rangoon"},NA:{"Africa/Windhoek":"Windhoek"},NR:{"Pacific/Nauru":"Nauru"},NP:{"Asia/Kathmandu":"Kathmandu"},NL:{"Europe/Amsterdam":"Amsterdam"},AN:{},NZ:{"Pacific/Auckland":"Auckland","Pacific/Chatham":"Chatham"},NI:{"America/Managua":"Managua"},NE:{"Africa/Niamey":"Niamey"},NG:{"Africa/Lagos":"Lagos"},NF:{"Pacific/Norfolk":"Norfolk"},KP:{"Asia/Pyongyang":"Pyongyang"},MP:{"Pacific/Saipan":"Saipan"},NO:{"Europe/Oslo":"Oslo"},OM:{"Asia/Muscat":"Muscat"},PK:{"Asia/Karachi":"Karachi"},PW:{"Pacific/Palau":"Palau"},PA:{"America/Panama":"Panama"},PG:{"Pacific/Port_Moresby":"Port Moresby"},PY:{"America/Asuncion":"Asuncion"},PE:{"America/Lima":"Lima"},PH:{"Asia/Manila":"Manila"},PN:{"Pacific/Pitcairn":"Pitcairn"},PL:{"Europe/Warsaw":"Warsaw"},PT:{"Atlantic/Azores":"Azores","Atlantic/Madeira":"Madeira","Europe/Lisbon":"Lisbon"},PR:{"America/Puerto_Rico":"Puerto 
Rico"},QA:{"Asia/Qatar":"Qatar"},RO:{"Europe/Bucharest":"Bucharest"},RU:{"Asia/Anadyr":"Anadyr","Asia/Irkutsk":"Irkutsk","Asia/Kamchatka":"Kamchatka","Asia/Krasnoyarsk":"Krasnoyarsk","Asia/Magadan":"Magadan","Asia/Novokuznetsk":"Novokuznetsk","Asia/Novosibirsk":"Novosibirsk","Asia/Omsk":"Omsk","Asia/Sakhalin":"Sakhalin","Asia/Vladivostok":"Vladivostok","Asia/Yakutsk":"Yakutsk","Asia/Yekaterinburg":"Yekaterinburg","Europe/Kaliningrad":"Kaliningrad","Europe/Moscow":"Moscow","Europe/Samara":"Samara","Europe/Volgograd":"Volgograd"},RW:{"Africa/Kigali":"Kigali"},ST:{"Africa/Sao_Tome":"Sao Tome"},SH:{"Atlantic/St_Helena":"St Helena"},KN:{"America/St_Kitts":"St Kitts"},LC:{"America/St_Lucia":"St Lucia"},VC:{"America/St_Vincent":"St Vincent"},WS:{"Pacific/Apia":"Apia"},SM:{"Europe/San_Marino":"San Marino"},SA:{"Asia/Riyadh":"Riyadh"},SN:{"Africa/Dakar":"Dakar"},RS:{"Europe/Belgrade":"Belgrade"},SC:{"Indian/Mahe":"Mahe"},SL:{"Africa/Freetown":"Freetown"},SG:{"Asia/Singapore":"Singapore"},SK:{"Europe/Bratislava":"Bratislava"},SI:{"Europe/Ljubljana":"Ljubljana"},SB:{"Pacific/Guadalcanal":"Guadalcanal"},SO:{"Africa/Mogadishu":"Mogadishu"},ZA:{"Africa/Johannesburg":"Johannesburg"},GS:{"Atlantic/South_Georgia":"South Georgia"},KR:{"Asia/Seoul":"Seoul"},ES:{"Africa/Ceuta":"Ceuta","Atlantic/Canary":"Canary","Europe/Madrid":"Madrid"},LK:{"Asia/Colombo":"Colombo"},SD:{"Africa/Khartoum":"Khartoum"},SR:{"America/Paramaribo":"Paramaribo"},SZ:{"Africa/Mbabane":"Mbabane"},SE:{"Europe/Stockholm":"Stockholm"},CH:{"Europe/Zurich":"Zurich"},SY:{"Asia/Damascus":"Damascus"},TW:{"Asia/Taipei":"Taipei"},TJ:{"Asia/Dushanbe":"Dushanbe"},TZ:{"Africa/Dar_es_Salaam":"Dar es Salaam"},TH:{"Asia/Bangkok":"Bangkok"},BS:{"America/Nassau":"Nassau"},GM:{"Africa/Banjul":"Banjul"},TG:{"Africa/Lome":"Lome"},TO:{"Pacific/Tongatapu":"Tongatapu"},TT:{"America/Port_of_Spain":"Port of Spain"},TN:{"Africa/Tunis":"Tunis"},TR:{"Europe/Istanbul":"Istanbul"},TM:{"Asia/Ashgabat":"Ashgabat"},TC:{"America/Grand_Turk":"Grand Turk"},TV:{"Pacific/Funafuti":"Funafuti"},VI:{"America/St_Thomas":"St Thomas"},UG:{"Africa/Kampala":"Kampala"},UA:{"Europe/Kiev":"Kiev","Europe/Simferopol":"Simferopol","Europe/Uzhgorod":"Uzhgorod","Europe/Zaporozhye":"Zaporozhye"},AE:{"Asia/Dubai":"Dubai"},GB:{"Europe/London":"London"},US:{"America/Adak":"Adak","America/Anchorage":"Anchorage","America/Boise":"Boise","America/Chicago":"Chicago","America/Denver":"Denver","America/Detroit":"Detroit","America/Indiana/Indianapolis":"Indiana / Indianapolis","America/Indiana/Knox":"Indiana / Knox","America/Indiana/Marengo":"Indiana / Marengo","America/Indiana/Petersburg":"Indiana / Petersburg","America/Indiana/Tell_City":"Indiana / Tell City","America/Indiana/Vevay":"Indiana / Vevay","America/Indiana/Vincennes":"Indiana / Vincennes","America/Indiana/Winamac":"Indiana / Winamac","America/Juneau":"Juneau","America/Kentucky/Louisville":"Kentucky / Louisville","America/Kentucky/Monticello":"Kentucky / Monticello","America/Los_Angeles":"Los Angeles","America/Menominee":"Menominee","America/Metlakatla":"Metlakatla","America/New_York":"New York","America/Nome":"Nome","America/North_Dakota/Beulah":"North Dakota / Beulah","America/North_Dakota/Center":"North Dakota / Center","America/North_Dakota/New_Salem":"North Dakota / New 
Salem","America/Phoenix":"Phoenix","America/Shiprock":"Shiprock","America/Sitka":"Sitka","America/Yakutat":"Yakutat","Pacific/Honolulu":"Honolulu"},UY:{"America/Montevideo":"Montevideo"},UZ:{"Asia/Samarkand":"Samarkand","Asia/Tashkent":"Tashkent"},VU:{"Pacific/Efate":"Efate"},VA:{"Europe/Vatican":"Vatican"},VE:{"America/Caracas":"Caracas"},VN:{"Asia/Ho_Chi_Minh":"Ho Chi Minh"},EH:{"Africa/El_Aaiun":"El Aaiun"},YE:{"Asia/Aden":"Aden"},ZM:{"Africa/Lusaka":"Lusaka"},ZW:{"Africa/Harare":"Harare"}}; -+function(a){"use strict";function b(a){var b=a.toString(16);return 1===b.length?"0"+b:b}function c(a,c,d){return"#"+b(a)+b(c)+b(d)}function d(){var b;a(f).each(function(c){return b=e(a(this)),b.hasClass("open")?(b.trigger(c=a.Event("hide.bfhcolorpicker")),c.isDefaultPrevented()?!0:(b.removeClass("open").trigger("hidden.bfhcolorpicker"),void 0)):!0})}function e(a){return a.closest(".bfh-colorpicker")}var f="[data-toggle=bfh-colorpicker]",g=function(b,c){this.options=a.extend({},a.fn.bfhcolorpicker.defaults,c),this.$element=a(b),this.initPopover()};g.prototype={constructor:g,initPalette:function(){var a,b,c;a=this.$element.find("canvas"),b=a[0].getContext("2d"),c=b.createLinearGradient(0,0,a.width(),0),c.addColorStop(0,"rgb(255, 255, 255)"),c.addColorStop(.1,"rgb(255, 0, 0)"),c.addColorStop(.25,"rgb(255, 0, 255)"),c.addColorStop(.4,"rgb(0, 0, 255)"),c.addColorStop(.55,"rgb(0, 255, 255)"),c.addColorStop(.7,"rgb(0, 255, 0)"),c.addColorStop(.85,"rgb(255, 255, 0)"),c.addColorStop(1,"rgb(255, 0, 0)"),b.fillStyle=c,b.fillRect(0,0,b.canvas.width,b.canvas.height),c=b.createLinearGradient(0,0,0,a.height()),c.addColorStop(0,"rgba(255, 255, 255, 1)"),c.addColorStop(.5,"rgba(255, 255, 255, 0)"),c.addColorStop(.5,"rgba(0, 0, 0, 0)"),c.addColorStop(1,"rgba(0, 0, 0, 1)"),b.fillStyle=c,b.fillRect(0,0,b.canvas.width,b.canvas.height)},initPopover:function(){var a,b;a="",b="","right"===this.options.align?b='':a='',this.$element.html('
          '+a+''+b+"
          "+'
          '+''+"
          "),this.$element.on("click.bfhcolorpicker.data-api touchstart.bfhcolorpicker.data-api",f,g.prototype.toggle).on("mousedown.bfhcolorpicker.data-api","canvas",g.prototype.mouseDown).on("click.bfhcolorpicker.data-api touchstart.bfhcolorpicker.data-api",".bfh-colorpicker-popover",function(){return!1}),this.initPalette(),this.$element.val(this.options.color)},updateVal:function(a,b){var d,e,f,g,h,i,j;h=5,d=this.$element.find("canvas"),e=d[0].getContext("2d"),f=a-d.offset().left,g=b-d.offset().top,f=Math.round(f/h)*h,g=Math.round(g/h)*h,0>f&&(f=0),f>=d.width()&&(f=d.width()-1),0>g&&(g=0),g>d.height()&&(g=d.height()),i=e.getImageData(f,g,1,1),j=c(i.data[0],i.data[1],i.data[2]),j!==this.$element.val()&&(this.$element.val(j),this.$element.trigger("change.bfhcolorpicker"))},mouseDown:function(){var b,c;b=a(this),c=e(b),a(document).on("mousemove.bfhcolorpicker.data-api",{colorpicker:c},g.prototype.mouseMove).one("mouseup.bfhcolorpicker.data-api",{colorpicker:c},g.prototype.mouseUp)},mouseMove:function(a){var b;b=a.data.colorpicker,b.data("bfhcolorpicker").updateVal(a.pageX,a.pageY)},mouseUp:function(b){var c;c=b.data.colorpicker,c.data("bfhcolorpicker").updateVal(b.pageX,b.pageY),a(document).off("mousemove.bfhcolorpicker.data-api"),c.data("bfhcolorpicker").options.close===!0&&d()},toggle:function(b){var c,f,g;if(c=a(this),f=e(c),f.is(".disabled")||void 0!==f.attr("disabled"))return!0;if(g=f.hasClass("open"),d(),!g){if(f.trigger(b=a.Event("show.bfhcolorpicker")),b.isDefaultPrevented())return!0;f.toggleClass("open").trigger("shown.bfhcolorpicker"),c.focus()}return!1}};var h=a.fn.bfhcolorpicker;a.fn.bfhcolorpicker=function(b){return this.each(function(){var c,d,e;c=a(this),d=c.data("bfhcolorpicker"),e="object"==typeof b&&b,this.type="bfhcolorpicker",d||c.data("bfhcolorpicker",d=new g(this,e)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhcolorpicker.Constructor=g,a.fn.bfhcolorpicker.defaults={align:"left",input:"form-control",placeholder:"",name:"",color:"#000000",close:!0},a.fn.bfhcolorpicker.noConflict=function(){return a.fn.bfhcolorpicker=h,this};var i;a.valHooks.div&&(i=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-colorpicker")?a(b).find('input[type="text"]').val():i?i.get(b):void 0},set:function(b,c){if(a(b).hasClass("bfh-colorpicker"))a(b).find(".bfh-colorpicker-icon").css("background-color",c),a(b).find('input[type="text"]').val(c);else if(i)return i.set(b,c)}},a(document).ready(function(){a("div.bfh-colorpicker").each(function(){var b;b=a(this),b.bfhcolorpicker(b.data())})}),a(document).on("click.bfhcolorpicker.data-api",d)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhcountries.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addCountries(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapCountries(),this.$element.is("span")&&this.displayCountry()};b.prototype={constructor:b,getCountries:function(){var b,c;if(this.options.available){if("string"==typeof this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHCountriesList)BFHCountriesList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHCountriesList[b])}else c=this.options.available;return c}return BFHCountriesList},addCountries:function(){var a,b,c;a=this.options.country,c=this.getCountries(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in 
c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapCountries:function(){var a,b,c,d,e,f;d=this.options.country,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getCountries(),c.html(""),this.options.blank===!0&&c.append('
        ');for(e in f)f.hasOwnProperty(e)&&(this.options.flags===!0?c.append('
        '+f[e]+"
        "):c.append('
        '+f[e]+"
        • "));this.$element.val(d)},displayCountry:function(){var a;a=this.options.country,this.options.flags===!0?this.$element.html(' '+BFHCountriesList[a]):this.$element.html(BFHCountriesList[a])}};var c=a.fn.bfhcountries;a.fn.bfhcountries=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhcountries"),f="object"==typeof c&&c,e||d.data("bfhcountries",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhcountries.Constructor=b,a.fn.bfhcountries.defaults={country:"",available:"",flags:!1,blank:!0},a.fn.bfhcountries.noConflict=function(){return a.fn.bfhcountries=c,this},a(document).ready(function(){a("form select.bfh-countries, span.bfh-countries, div.bfh-countries").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhcountries(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhcurrencies.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addCurrencies(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapCurrencies(),this.$element.is("span")&&this.displayCurrency()};b.prototype={constructor:b,getCurrencies:function(){var b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHCurrenciesList)BFHCurrenciesList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHCurrenciesList[b]);return c}return BFHCurrenciesList},addCurrencies:function(){var a,b,c;a=this.options.currency,c=this.getCurrencies(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapCurrencies:function(){var a,b,c,d,e,f,g;d=this.options.currency,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getCurrencies(),c.html(""),this.options.blank===!0&&c.append('
        ');for(e in f)f.hasOwnProperty(e)&&(this.options.flags===!0?(g=f[e].currencyflag?f[e].currencyflag:e.substr(0,2),c.append('
        '+f[e].label+"
        ")):c.append('
        '+f[e].label+"
        • "));this.$element.val(d)},displayCurrency:function(){var a,b;a=this.options.currency,this.options.flags===!0?(b=BFHCurrenciesList[a].currencyflag?BFHCurrenciesList[a].currencyflag:a.substr(0,2),this.$element.html(' '+BFHCurrenciesList[a].label)):this.$element.html(BFHCurrenciesList[a].label)}};var c=a.fn.bfhcurrencies;a.fn.bfhcurrencies=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhcurrencies"),f="object"==typeof c&&c,e||d.data("bfhcurrencies",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhcurrencies.Constructor=b,a.fn.bfhcurrencies.defaults={currency:"",available:"",flags:!1,blank:!0},a.fn.bfhcurrencies.noConflict=function(){return a.fn.bfhcurrencies=c,this},a(document).ready(function(){a("form select.bfh-currencies, span.bfh-currencies, div.bfh-currencies").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhcurrencies(b.data())})})}(window.jQuery),+function(a){"use strict";function b(a,b){return new Date(b,a,0).getDate()}function c(a,b,c){return new Date(b,a,c).getDay()}function d(a,b,c,d){return b+=1,b=String(b),d=String(d),1===b.length&&(b="0"+b),1===d.length&&(d="0"+d),a.replace("m",b).replace("y",c).replace("d",d)}function e(a,b,c){var d,e,f;d=[{part:"m",position:a.indexOf("m")},{part:"y",position:a.indexOf("y")},{part:"d",position:a.indexOf("d")}],d.sort(function(a,b){return a.position-b.position}),f=b.match(/(\d+)/g);for(e in d)if(d.hasOwnProperty(e)&&d[e].part===c)return Number(f[e]).toString()}function f(){var b;a(h).each(function(c){return b=g(a(this)),b.hasClass("open")?(b.trigger(c=a.Event("hide.bfhdatepicker")),c.isDefaultPrevented()?!0:(b.removeClass("open").trigger("hidden.bfhdatepicker"),void 0)):!0})}function g(a){return a.closest(".bfh-datepicker")}var h="[data-toggle=bfh-datepicker]",i=function(b,c){this.options=a.extend({},a.fn.bfhdatepicker.defaults,c),this.$element=a(b),this.initCalendar()};i.prototype={constructor:i,setDate:function(){var a,b,c;a=this.options.date,c=this.options.format,""===a||"today"===a||void 0===a?(b=new Date,"today"===a&&this.$element.val(d(c,b.getMonth(),b.getFullYear(),b.getDate())),this.$element.data("month",b.getMonth()),this.$element.data("year",b.getFullYear())):(this.$element.val(a),this.$element.data("month",Number(e(c,a,"m")-1)),this.$element.data("year",Number(e(c,a,"y"))))},setDateLimit:function(a,b){var c,d;d=this.options.format,""!==a?(this.$element.data(b+"limit",!0),"today"===a?(c=new Date,this.$element.data(b+"day",c.getDate()),this.$element.data(b+"month",c.getMonth()),this.$element.data(b+"year",c.getFullYear())):(this.$element.data(b+"day",Number(e(d,a,"d"))),this.$element.data(b+"month",Number(e(d,a,"m")-1)),this.$element.data(b+"year",Number(e(d,a,"y"))))):this.$element.data(b+"limit",!1)},initCalendar:function(){var a,b,c;a="",b="",c="",""!==this.options.icon&&("right"===this.options.align?b='':a='',c="input-group"),this.$element.html('
          '+a+''+b+"
          "+'
          '+''+""+''+'"+'"+""+''+""+""+""+""+"
          '+''+""+''+"'+''+""+''+"
          "+"
          "),this.$element.on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",h,i.prototype.toggle).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .month > .previous",i.prototype.previousMonth).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .month > .next",i.prototype.nextMonth).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .year > .previous",i.prototype.previousYear).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar .year > .next",i.prototype.nextYear).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar td:not(.off)",i.prototype.select).on("click.bfhdatepicker.data-api touchstart.bfhdatepicker.data-api",".bfh-datepicker-calendar > table.calendar",function(){return!1}),this.setDate(),this.setDateLimit(this.options.min,"lower"),this.setDateLimit(this.options.max,"higher"),this.updateCalendar()},updateCalendarHeader:function(a,b,c){var d,e;for(a.find("table > thead > tr > th.month > span").text(BFHMonthsList[b]),a.find("table > thead > tr > th.year > span").text(c),d=a.find("table > thead > tr.days-header"),d.html(""),e=BFHDayOfWeekStart;e"+BFHDaysList[e]+"");for(e=0;BFHDayOfWeekStart>e;e+=1)d.append(""+BFHDaysList[e]+"")},checkMinDate:function(a,b,c){var d,e,f,g;return d=this.$element.data("lowerlimit"),d===!0&&(e=this.$element.data("lowerday"),f=this.$element.data("lowermonth"),g=this.$element.data("loweryear"),e>a&&b===f&&c===g||f>b&&c===g||g>c)?!0:!1},checkMaxDate:function(a,b,c){var d,e,f,g;return d=this.$element.data("higherlimit"),d===!0&&(e=this.$element.data("higherday"),f=this.$element.data("highermonth"),g=this.$element.data("higheryear"),a>e&&b===f&&c===g||b>f&&c===g||c>g)?!0:!1},checkToday:function(a,b,c){var d;return d=new Date,a===d.getDate()&&b===d.getMonth()&&c===d.getFullYear()?!0:!1},updateCalendarDays:function(a,d,e){var f,g,h,i,j,k,l;for(f=a.find("table > tbody").html(""),g=b(d,e),h=b(d+1,e),i=c(d,e,1),j=c(d,e,h),k="",l=0;(i-BFHDayOfWeekStart+7)%7>l;l+=1)k+=''+(g-(i-BFHDayOfWeekStart+7)%7+l+1)+"";for(l=1;h>=l;l+=1)k+=this.checkMinDate(l,d,e)?''+l+"":this.checkMaxDate(l,d,e)?''+l+"":this.checkToday(l,d,e)?''+l+"":''+l+"",c(d,e,l)===(6+BFHDayOfWeekStart)%7&&(f.append(""+k+""),k="");for(l=1;(7-(j+1-BFHDayOfWeekStart+7)%7)%7+1>=l;l+=1)k+=''+l+"",l===(7-(j+1-BFHDayOfWeekStart+7)%7)%7&&f.append(""+k+"")},updateCalendar:function(){var a,b,c;a=this.$element.find(".bfh-datepicker-calendar"),b=this.$element.data("month"),c=this.$element.data("year"),this.updateCalendarHeader(a,b,c),this.updateCalendarDays(a,b,c)},previousMonth:function(){var b,c,d;return b=a(this),c=g(b),0===Number(c.data("month"))?(c.data("month",11),c.data("year",Number(c.data("year"))-1)):c.data("month",Number(c.data("month"))-1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},nextMonth:function(){var b,c,d;return b=a(this),c=g(b),11===Number(c.data("month"))?(c.data("month",0),c.data("year",Number(c.data("year"))+1)):c.data("month",Number(c.data("month"))+1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},previousYear:function(){var b,c,d;return b=a(this),c=g(b),c.data("year",Number(c.data("year"))-1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},nextYear:function(){var b,c,d;return 
b=a(this),c=g(b),c.data("year",Number(c.data("year"))+1),d=c.data("bfhdatepicker"),d.updateCalendar(),!1},select:function(b){var c,e,h,i,j,k;c=a(this),b.preventDefault(),b.stopPropagation(),e=g(c),h=e.data("bfhdatepicker"),i=e.data("month"),j=e.data("year"),k=c.data("day"),e.val(d(h.options.format,i,j,k)),e.trigger("change.bfhdatepicker"),h.options.close===!0&&f()},toggle:function(b){var c,d,e;if(c=a(this),d=g(c),d.is(".disabled")||void 0!==d.attr("disabled"))return!0;if(e=d.hasClass("open"),f(),!e){if(d.trigger(b=a.Event("show.bfhdatepicker")),b.isDefaultPrevented())return!0;d.toggleClass("open").trigger("shown.bfhdatepicker"),c.focus()}return!1}};var j=a.fn.bfhdatepicker;a.fn.bfhdatepicker=function(b){return this.each(function(){var c,d,e;c=a(this),d=c.data("bfhdatepicker"),e="object"==typeof b&&b,this.type="bfhdatepicker",d||c.data("bfhdatepicker",d=new i(this,e)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhdatepicker.Constructor=i,a.fn.bfhdatepicker.defaults={icon:"glyphicon glyphicon-calendar",align:"left",input:"form-control",placeholder:"",name:"",date:"today",format:"m/d/y",min:"",max:"",close:!0},a.fn.bfhdatepicker.noConflict=function(){return a.fn.bfhdatepicker=j,this};var k;a.valHooks.div&&(k=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-datepicker")?a(b).find('input[type="text"]').val():k?k.get(b):void 0},set:function(b,c){if(a(b).hasClass("bfh-datepicker"))a(b).find('input[type="text"]').val(c);else if(k)return k.set(b,c)}},a(document).ready(function(){a("div.bfh-datepicker").each(function(){var b;b=a(this),b.bfhdatepicker(b.data())})}),a(document).on("click.bfhdatepicker.data-api",f)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhfonts.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addFonts(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapFonts()};b.prototype={constructor:b,getFonts:function(){var b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHFontsList)BFHFontsList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHFontsList[b]);return c}return BFHFontsList},addFonts:function(){var a,b,c;a=this.options.font,c=this.getFonts(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapFonts:function(){var a,b,c,d,e,f;d=this.options.font,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getFonts(),c.html(""),this.options.blank===!0&&c.append('
        ');for(e in f)f.hasOwnProperty(e)&&c.append('
        '+e+"
        • ");this.$element.val(d)}};var c=a.fn.bfhfonts;a.fn.bfhfonts=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhfonts"),f="object"==typeof c&&c,e||d.data("bfhfonts",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhfonts.Constructor=b,a.fn.bfhfonts.defaults={font:"",available:"",blank:!0},a.fn.bfhfonts.noConflict=function(){return a.fn.bfhfonts=c,this},a(document).ready(function(){a("form select.bfh-fonts, span.bfh-fonts, div.bfh-fonts").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhfonts(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhfontsizes.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addFontSizes(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapFontSizes()};b.prototype={constructor:b,getFontsizes:function(){var b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in BFHFontSizesList)BFHFontSizesList.hasOwnProperty(b)&&a.inArray(b,this.options.available)>=0&&(c[b]=BFHFontSizesList[b]);return c}return BFHFontSizesList},addFontSizes:function(){var a,b,c;a=this.options.fontsize,c=this.getFontsizes(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapFontSizes:function(){var a,b,c,d,e,f;d=this.options.fontsize,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getFontsizes(),c.html(""),this.options.blank===!0&&c.append('
        ');for(e in f)f.hasOwnProperty(e)&&c.append('
        '+f[e]+"
        • ");this.$element.val(d)}};var c=a.fn.bfhfontsizes;a.fn.bfhfontsizes=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhfontsizes"),f="object"==typeof c&&c,e||d.data("bfhfontsizes",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhfontsizes.Constructor=b,a.fn.bfhfontsizes.defaults={fontsize:"",available:"",blank:!0},a.fn.bfhfontsizes.noConflict=function(){return a.fn.bfhfontsizes=c,this},a(document).ready(function(){a("form select.bfh-fontsizes, span.bfh-fontsizes, div.bfh-fontsizes").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhfontsizes(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhgooglefonts.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addFonts(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapFonts()};b.prototype={constructor:b,getFonts:function(){var b,c;if(c=[],this.options.subset)for(b in BFHGoogleFontsList.items)BFHGoogleFontsList.items.hasOwnProperty(b)&&a.inArray(this.options.subset,BFHGoogleFontsList.items[b].subsets)>=0&&(c[BFHGoogleFontsList.items[b].family]={info:BFHGoogleFontsList.items[b],index:parseInt(b,10)});else if(this.options.available){this.options.available=this.options.available.split(",");for(b in BFHGoogleFontsList.items)BFHGoogleFontsList.items.hasOwnProperty(b)&&a.inArray(BFHGoogleFontsList.items[b].family,this.options.available)>=0&&(c[BFHGoogleFontsList.items[b].family]={info:BFHGoogleFontsList.items[b],index:parseInt(b,10)})}else for(b in BFHGoogleFontsList.items)BFHGoogleFontsList.items.hasOwnProperty(b)&&(c[BFHGoogleFontsList.items[b].family]={info:BFHGoogleFontsList.items[b],index:parseInt(b,10)});return c},addFonts:function(){var a,b,c;a=this.options.font,c=this.getFonts(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(b in c)c.hasOwnProperty(b)&&this.$element.append('");this.$element.val(a)},addBootstrapFonts:function(){var a,b,c,d,e,f;d=this.options.font,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),f=this.getFonts(),c.html(""),this.options.blank===!0&&c.append('
        ');for(e in f)f.hasOwnProperty(e)&&c.append('
        '+f[e].info.family+"
        • ");this.$element.val(d)}};var c=a.fn.bfhgooglefonts;a.fn.bfhgooglefonts=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhgooglefonts"),f="object"==typeof c&&c,e||d.data("bfhgooglefonts",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhgooglefonts.Constructor=b,a.fn.bfhgooglefonts.defaults={font:"",available:"",subset:"",blank:!0},a.fn.bfhgooglefonts.noConflict=function(){return a.fn.bfhgooglefonts=c,this},a(document).ready(function(){a("form select.bfh-googlefonts, span.bfh-googlefonts, div.bfh-googlefonts").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhgooglefonts(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhlanguages.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addLanguages(),this.$element.is("span")&&this.displayLanguage(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapLanguages()};b.prototype={constructor:b,getLanguages:function(){var a,b,c;if(this.options.available){c=[],this.options.available=this.options.available.split(",");for(b in this.options.available)this.options.available.hasOwnProperty(b)&&(-1!==this.options.available[b].indexOf("_")?(a=this.options.available[b].split("_"),c[a[0]]={name:BFHLanguagesList[a[0]],country:a[1]}):c[this.options.available[b]]=BFHLanguagesList[this.options.available[b]]);return c}return BFHLanguagesList},addLanguages:function(){var a,b,c;a=this.options.language,b=this.getLanguages(),this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(c in b)b.hasOwnProperty(c)&&(b[c].hasOwnProperty("name")?this.$element.append('"):this.$element.append('"));this.$element.val(a)},addBootstrapLanguages:function(){var a,b,c,d,e,f;d=this.options.language,a=this.$element.find('input[type="hidden"]'),b=this.$element.find(".bfh-selectbox-option"),c=this.$element.find("[role=option]"),e=this.getLanguages(),c.html(""),this.options.blank===!0&&c.append('
        ');for(f in e)e.hasOwnProperty(f)&&(e[f].hasOwnProperty("name")?this.options.flags===!0?c.append('
        '+e[f].name.toProperCase()+"
        "):c.append('
        '+e[f].name.toProperCase()+" ("+BFHCountriesList[e[f].country]+")
        "):c.append('
        '+e[f]+"
        • "));this.$element.val(d)},displayLanguage:function(){var a;a=this.options.language,-1!==a.indexOf("_")?(a=a.split("_"),this.options.flags===!0?this.$element.html(' '+BFHLanguagesList[a[0]].toProperCase()):this.$element.html(BFHLanguagesList[a[0]].toProperCase()+" ("+BFHCountriesList[a[1]]+")")):this.$element.html(BFHLanguagesList[a].toProperCase())}};var c=a.fn.bfhlanguages;a.fn.bfhlanguages=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhlanguages"),f="object"==typeof c&&c,e||d.data("bfhlanguages",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhlanguages.Constructor=b,a.fn.bfhlanguages.defaults={language:"",available:"",flags:!1,blank:!0},a.fn.bfhlanguages.noConflict=function(){return a.fn.bfhlanguages=c,this},a(document).ready(function(){a("form select.bfh-languages, span.bfh-languages, div.bfh-languages").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhlanguages(b.data())})}),String.prototype.toProperCase=function(){return this.replace(/\w\S*/g,function(a){return a.charAt(0).toUpperCase()+a.substr(1).toLowerCase()})}}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhnumber.defaults,c),this.$element=a(b),this.initInput()};b.prototype={constructor:b,initInput:function(){this.options.buttons===!0&&(this.$element.wrap('
          '),this.$element.parent().append(''),this.$element.parent().append('')),this.$element.on("change.bfhnumber.data-api",b.prototype.change),this.options.keyboard===!0&&this.$element.on("keydown.bfhnumber.data-api",b.prototype.keydown),this.options.buttons===!0&&this.$element.parent().on("mousedown.bfhnumber.data-api",".inc",b.prototype.btninc).on("mousedown.bfhnumber.data-api",".dec",b.prototype.btndec),this.formatNumber()},keydown:function(b){var c;if(c=a(this).data("bfhnumber"),c.$element.is(".disabled")||void 0!==c.$element.attr("disabled"))return!0;switch(b.which){case 38:c.increment();break;case 40:c.decrement()}return!0},mouseup:function(a){var b,c,d;b=a.data.btn,c=b.$element.data("timer"),d=b.$element.data("interval"),clearTimeout(c),clearInterval(d)},btninc:function(){var c,d;return c=a(this).parent().find(".bfh-number").data("bfhnumber"),c.$element.is(".disabled")||void 0!==c.$element.attr("disabled")?!0:(c.increment(),d=setTimeout(function(){var a;a=setInterval(function(){c.increment()},80),c.$element.data("interval",a)},750),c.$element.data("timer",d),a(document).one("mouseup",{btn:c},b.prototype.mouseup),!0)},btndec:function(){var c,d;return c=a(this).parent().find(".bfh-number").data("bfhnumber"),c.$element.is(".disabled")||void 0!==c.$element.attr("disabled")?!0:(c.decrement(),d=setTimeout(function(){var a;a=setInterval(function(){c.decrement()},80),c.$element.data("interval",a)},750),c.$element.data("timer",d),a(document).one("mouseup",{btn:c},b.prototype.mouseup),!0)},change:function(){var b;return b=a(this).data("bfhnumber"),b.$element.is(".disabled")||void 0!==b.$element.attr("disabled")?!0:(b.formatNumber(),!0)},increment:function(){var a;a=this.getValue(),a+=1,this.$element.val(a).change()},decrement:function(){var a;a=this.getValue(),a-=1,this.$element.val(a).change()},getValue:function(){var a;return a=this.$element.val(),"-1"!==a&&(a=String(a).replace(/\D/g,"")),0===String(a).length&&(a=this.options.min),parseInt(a)},formatNumber:function(){var a,b,c,d;if(a=this.getValue(),a>this.options.max&&(a=this.options.wrap===!0?this.options.min:this.options.max),ad;d+=1)a="0"+a;a!==this.$element.val()&&this.$element.val(a)}};var c=a.fn.bfhnumber;a.fn.bfhnumber=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhnumber"),f="object"==typeof c&&c,e||d.data("bfhnumber",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhnumber.Constructor=b,a.fn.bfhnumber.defaults={min:0,max:9999,zeros:!1,keyboard:!0,buttons:!0,wrap:!1},a.fn.bfhnumber.noConflict=function(){return a.fn.bfhnumber=c,this},a(document).ready(function(){a('form input[type="text"].bfh-number, form input[type="number"].bfh-number').each(function(){var b;b=a(this),b.bfhnumber(b.data())})})}(window.jQuery),+function(a){"use strict";function b(a,b){var c,d,e,f;for(c="",b=String(b).replace(/\D/g,""),d=0,e=0;d'+a(this).html()+""}),this.$element.html(''+''+''+''+""+'
          '+'
          '+'
            '+"
          "+"
          "+"
          "),this.$element.find("[role=option]").html(b),this.options.filter===!0&&this.$element.find(".bfh-selectbox-options").prepend('
          '),this.$element.val(this.options.value),this.$element.on("click.bfhselectbox.data-api touchstart.bfhselectbox.data-api",d,e.prototype.toggle).on("keydown.bfhselectbox.data-api",d+", [role=option]",e.prototype.keydown).on("mouseenter.bfhselectbox.data-api","[role=option] > li > a",e.prototype.mouseenter).on("click.bfhselectbox.data-api","[role=option] > li > a",e.prototype.select).on("click.bfhselectbox.data-api",".bfh-selectbox-filter",function(){return!1}).on("propertychange.bfhselectbox.data-api change.bfhselectbox.data-api input.bfhselectbox.data-api paste.bfhselectbox.data-api",".bfh-selectbox-filter",e.prototype.filter)},toggle:function(d){var e,f,g;if(e=a(this),f=c(e),f.is(".disabled")||void 0!==f.attr("disabled"))return!0;if(g=f.hasClass("open"),b(),!g){if(f.trigger(d=a.Event("show.bfhselectbox")),d.isDefaultPrevented())return!0;f.toggleClass("open").trigger("shown.bfhselectbox").find('[role=option] > li > [data-option="'+f.val()+'"]').focus()}return!1},filter:function(){var b,d,e;b=a(this),d=c(b),e=a("[role=option] li a",d),e.hide().filter(function(){return-1!==a(this).text().toUpperCase().indexOf(b.val().toUpperCase())}).show()},keydown:function(b){var f,g,h,i,j;return/(38|40|27)/.test(b.keyCode)?(f=a(this),b.preventDefault(),b.stopPropagation(),h=c(f),i=h.hasClass("open"),!i||i&&27===b.keyCode?(27===b.which&&h.find(d).focus(),f.click()):(g=a("[role=option] li:not(.divider) a:visible",h),g.length?(a("body").off("mouseenter.bfh-selectbox.data-api","[role=option] > li > a",e.prototype.mouseenter),j=g.index(g.filter(":focus")),38===b.keyCode&&j>0&&(j-=1),40===b.keyCode&&j li > a",e.prototype.mouseenter),void 0):!0)):!0},mouseenter:function(){var b;b=a(this),b.focus()},select:function(d){var e,f;return e=a(this),d.preventDefault(),d.stopPropagation(),e.is(".disabled")||void 0!==e.attr("disabled")?!0:(f=c(e),f.val(e.data("option")),f.trigger("change.bfhselectbox"),b(),void 0)}};var f=a.fn.bfhselectbox;a.fn.bfhselectbox=function(b){return this.each(function(){var c,d,f;c=a(this),d=c.data("bfhselectbox"),f="object"==typeof b&&b,this.type="bfhselectbox",d||c.data("bfhselectbox",d=new e(this,f)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhselectbox.Constructor=e,a.fn.bfhselectbox.defaults={icon:"caret",input:"form-control",name:"",value:"",filter:!1},a.fn.bfhselectbox.noConflict=function(){return a.fn.bfhselectbox=f,this};var g;a.valHooks.div&&(g=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-selectbox")?a(b).find('input[type="hidden"]').val():g?g.get(b):void 0},set:function(b,c){var d,e;if(a(b).hasClass("bfh-selectbox"))d=a(b),d.find("li a[data-option='"+c+"']").length>0?e=d.find("li a[data-option='"+c+"']").html():d.find("li a").length>0?e=d.find("li a").eq(0).html():(c="",e=""),d.find('input[type="hidden"]').val(c),d.find(".bfh-selectbox-option").html(e);else if(g)return g.set(b,c)}},a(document).ready(function(){a("div.bfh-selectbox").each(function(){var b;b=a(this),b.bfhselectbox(b.data())})}),a(document).on("click.bfhselectbox.data-api",b)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhslider.defaults,c),this.$element=a(b),this.initSlider()};b.prototype={constructor:b,initSlider:function(){""===this.options.value&&(this.options.value=this.options.min),this.$element.html(''+'
          '),this.$element.find('input[type="hidden"]').val(this.options.value),this.updateHandle(this.options.value),this.$element.on("mousedown.bfhslider.data-api",b.prototype.mouseDown)},updateHandle:function(a){var b,c,d,e;e=this.options.max-this.options.min,c=this.$element.width(),d=this.$element.position().left,b=Math.round((a-this.options.min)*(c-20)/e+d),this.$element.find(".bfh-slider-handle").css("left",b+"px"),this.$element.find(".bfh-slider-value").text(a)},updateVal:function(a){var b,c,d,e,f;return f=this.options.max-this.options.min,b=this.$element.width(),c=this.$element.offset().left,d=c+b,c>a&&(a=c),a+20>d&&(a=d),e=(a-c)/b,e=Math.ceil(e*f+this.options.min),e===this.$element.val()?!0:(this.$element.val(e),this.$element.trigger("change.bfhslider"),void 0)},mouseDown:function(){var c;return c=a(this),c.is(".disabled")||void 0!==c.attr("disabled")?!0:(a(document).on("mousemove.bfhslider.data-api",{slider:c},b.prototype.mouseMove).one("mouseup.bfhslider.data-api",{slider:c},b.prototype.mouseUp),void 0)},mouseMove:function(a){var b;b=a.data.slider,b.data("bfhslider").updateVal(a.pageX)},mouseUp:function(b){var c;c=b.data.slider,c.data("bfhslider").updateVal(b.pageX),a(document).off("mousemove.bfhslider.data-api")}};var c=a.fn.bfhslider;a.fn.bfhslider=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhslider"),f="object"==typeof c&&c,this.type="bfhslider",e||d.data("bfhslider",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhslider.Constructor=b,a.fn.bfhslider.defaults={name:"",value:"",min:0,max:100},a.fn.bfhslider.noConflict=function(){return a.fn.bfhslider=c,this};var d;a.valHooks.div&&(d=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-slider")?a(b).find('input[type="hidden"]').val():d?d.get(b):void 0},set:function(b,c){if(a(b).hasClass("bfh-slider"))a(b).find('input[type="hidden"]').val(c),a(b).data("bfhslider").updateHandle(c);else if(d)return d.set(b,c)}},a(document).ready(function(){a("div.bfh-slider").each(function(){var b;b=a(this),b.bfhslider(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhstates.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addStates(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapStates(),this.$element.is("span")&&this.displayState()};b.prototype={constructor:b,addStates:function(){var b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.val(),c.on("change",{state:this},this.changeCountry))),this.loadStates(b)},loadStates:function(a){var b,c;b=this.options.state,this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(c in BFHStatesList[a])BFHStatesList[a].hasOwnProperty(c)&&this.$element.append('");this.$element.val(b)},changeCountry:function(b){var c,d,e;c=a(this),d=b.data.state,e=c.val(),d.loadStates(e)},addBootstrapStates:function(){var b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.find('input[type="hidden"]').val(),c.on("change.bfhselectbox",{state:this},this.changeBootstrapCountry))),this.loadBootstrapStates(b)},loadBootstrapStates:function(a){var b,c,d,e,f,g;e=this.options.state,f="",b=this.$element.find('input[type="hidden"]'),c=this.$element.find(".bfh-selectbox-option"),d=this.$element.find("[role=option]"),d.html(""),this.options.blank===!0&&d.append('
        ');for(g in BFHStatesList[a])BFHStatesList[a].hasOwnProperty(g)&&(d.append('
        '+BFHStatesList[a][g].name+"
        • "),BFHStatesList[a][g].code===e&&(f=BFHStatesList[a][g].name));this.$element.val(e)},changeBootstrapCountry:function(b){var c,d,e;c=a(this),d=b.data.state,e=c.val(),d.loadBootstrapStates(e)},displayState:function(){var a,b,c,d;a=this.options.country,b=this.options.state,c="";for(d in BFHStatesList[a])if(BFHStatesList[a].hasOwnProperty(d)&&BFHStatesList[a][d].code===b){c=BFHStatesList[a][d].name;break}this.$element.html(c)}};var c=a.fn.bfhstates;a.fn.bfhstates=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhstates"),f="object"==typeof c&&c,e||d.data("bfhstates",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhstates.Constructor=b,a.fn.bfhstates.defaults={country:"",state:"",blank:!0},a.fn.bfhstates.noConflict=function(){return a.fn.bfhstates=c,this},a(document).ready(function(){a("form select.bfh-states, span.bfh-states, div.bfh-states").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhstates(b.data())})})}(window.jQuery),+function(a){"use strict";function b(a,b){return a=String(a),1===a.length&&(a="0"+a),b=String(b),1===b.length&&(b="0"+b),a+BFHTimePickerDelimiter+b}function c(){var b;a(e).each(function(c){return b=d(a(this)),b.hasClass("open")?(b.trigger(c=a.Event("hide.bfhtimepicker")),c.isDefaultPrevented()?!0:(b.removeClass("open").trigger("hidden.bfhtimepicker"),void 0)):!0})}function d(a){return a.closest(".bfh-timepicker")}var e="[data-toggle=bfh-timepicker]",f=function(b,c){this.options=a.extend({},a.fn.bfhtimepicker.defaults,c),this.$element=a(b),this.initPopover()};f.prototype={constructor:f,setTime:function(){var a,c,d,e,f,g,h;a=this.options.time,g="",h="",""===a||"now"===a||void 0===a?(c=new Date,e=c.getHours(),f=c.getMinutes(),"12h"===this.options.mode&&(e>12?(e-=12,g=" "+BFHTimePickerModes.pm,h="pm"):(g=" "+BFHTimePickerModes.am,h="am")),"now"===a&&this.$element.find('.bfh-timepicker-toggle > input[type="text"]').val(b(e,f)+g),this.$element.data("hour",e),this.$element.data("minute",f),this.$element.data("mode",h)):(d=String(a).split(BFHTimePickerDelimiter),e=d[0],f=d[1],"12h"===this.options.mode&&(d=String(f).split(" "),f=d[0],h=d[1]===BFHTimePickerModes.pm?"pm":"am"),this.$element.find('.bfh-timepicker-toggle > input[type="text"]').val(a),this.$element.data("hour",e),this.$element.data("minute",f),this.$element.data("mode",h))},initPopover:function(){var b,c,d,g,h;b="",c="",d="",""!==this.options.icon&&("right"===this.options.align?c='':b='',d="input-group"),g="",h="23","12h"===this.options.mode&&(g='
          '+'
          '+BFHTimePickerModes.am+"
          "+'
          '+BFHTimePickerModes.pm+"
          "+"
          ",h="11"),this.$element.html('
          '+b+''+c+"
          "+'
          '+''+""+""+'"+'"+'"+g+""+""+"
          '+''+"'+BFHTimePickerDelimiter+"'+''+"
          "+"
          "),this.$element.on("click.bfhtimepicker.data-api touchstart.bfhtimepicker.data-api",e,f.prototype.toggle).on("click.bfhtimepicker.data-api touchstart.bfhtimepicker.data-api",".bfh-timepicker-popover > table",function(){return!1}),this.$element.find(".bfh-number").each(function(){var b;b=a(this),b.bfhnumber(b.data()),b.on("change",f.prototype.change)}),this.$element.find(".bfh-selectbox").each(function(){var b;b=a(this),b.bfhselectbox(b.data()),b.on("change.bfhselectbox",f.prototype.change)}),this.setTime(),this.updatePopover()},updatePopover:function(){var a,b,c;a=this.$element.data("hour"),b=this.$element.data("minute"),c=this.$element.data("mode"),this.$element.find(".hour input[type=text]").val(a).change(),this.$element.find(".minute input[type=text]").val(b).change(),this.$element.find(".bfh-selectbox").val(c)},change:function(){var b,c,e,f;return b=a(this),c=d(b),e=c.data("bfhtimepicker"),e&&"undefined"!==e&&(f="","12h"===e.options.mode&&(f=" "+BFHTimePickerModes[c.find(".bfh-selectbox").val()]),c.find('.bfh-timepicker-toggle > input[type="text"]').val(c.find(".hour input[type=text]").val()+BFHTimePickerDelimiter+c.find(".minute input[type=text]").val()+f),c.trigger("change.bfhtimepicker")),!1},toggle:function(b){var e,f,g;if(e=a(this),f=d(e),f.is(".disabled")||void 0!==f.attr("disabled"))return!0;if(g=f.hasClass("open"),c(),!g){if(f.trigger(b=a.Event("show.bfhtimepicker")),b.isDefaultPrevented())return!0;f.toggleClass("open").trigger("shown.bfhtimepicker"),e.focus()}return!1}};var g=a.fn.bfhtimepicker;a.fn.bfhtimepicker=function(b){return this.each(function(){var c,d,e;c=a(this),d=c.data("bfhtimepicker"),e="object"==typeof b&&b,this.type="bfhtimepicker",d||c.data("bfhtimepicker",d=new f(this,e)),"string"==typeof b&&d[b].call(c)})},a.fn.bfhtimepicker.Constructor=f,a.fn.bfhtimepicker.defaults={icon:"glyphicon glyphicon-time",align:"left",input:"form-control",placeholder:"",name:"",time:"now",mode:"24h"},a.fn.bfhtimepicker.noConflict=function(){return a.fn.bfhtimepicker=g,this};var h;a.valHooks.div&&(h=a.valHooks.div),a.valHooks.div={get:function(b){return a(b).hasClass("bfh-timepicker")?a(b).find('.bfh-timepicker-toggle > input[type="text"]').val():h?h.get(b):void 0},set:function(b,c){var d;if(a(b).hasClass("bfh-timepicker"))d=a(b).data("bfhtimepicker"),d.options.time=c,d.setTime(),d.updatePopover();else if(h)return h.set(b,c)}},a(document).ready(function(){a("div.bfh-timepicker").each(function(){var b;b=a(this),b.bfhtimepicker(b.data())})}),a(document).on("click.bfhtimepicker.data-api",c)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=a.extend({},a.fn.bfhtimezones.defaults,c),this.$element=a(b),this.$element.is("select")&&this.addTimezones(),this.$element.hasClass("bfh-selectbox")&&this.addBootstrapTimezones()};b.prototype={constructor:b,addTimezones:function(){var b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.val(),c.on("change",{timezone:this},this.changeCountry))),this.loadTimezones(b)},loadTimezones:function(a){var b,c;b=this.options.timezone,this.$element.html(""),this.options.blank===!0&&this.$element.append('');for(c in BFHTimezonesList[a])BFHTimezonesList[a].hasOwnProperty(c)&&this.$element.append('");this.$element.val(b)},changeCountry:function(b){var c,d,e;c=a(this),d=b.data.timezone,e=c.val(),d.loadTimezones(e)},addBootstrapTimezones:function(){var 
b,c;b=this.options.country,""!==b&&(c=a(document).find("#"+b),0!==c.length&&(b=c.find('input[type="hidden"]').val(),c.on("change.bfhselectbox",{timezone:this},this.changeBootstrapCountry))),this.loadBootstrapTimezones(b)},loadBootstrapTimezones:function(a){var b,c,d,e,f;e=this.options.timezone,b=this.$element.find('input[type="hidden"]'),c=this.$element.find(".bfh-selectbox-option"),d=this.$element.find("[role=option]"),d.html(""),this.options.blank===!0&&d.append('
');for(f in BFHTimezonesList[a])BFHTimezonesList[a].hasOwnProperty(f)&&d.append(''+BFHTimezonesList[a][f]+"
        • ");this.$element.val(e)},changeBootstrapCountry:function(b){var c,d,e;c=a(this),d=b.data.timezone,e=c.val(),d.loadBootstrapTimezones(e)}};var c=a.fn.bfhtimezones;a.fn.bfhtimezones=function(c){return this.each(function(){var d,e,f;d=a(this),e=d.data("bfhtimezones"),f="object"==typeof c&&c,e||d.data("bfhtimezones",e=new b(this,f)),"string"==typeof c&&e[c].call(d)})},a.fn.bfhtimezones.Constructor=b,a.fn.bfhtimezones.defaults={country:"",timezone:"",blank:!0},a.fn.bfhtimezones.noConflict=function(){return a.fn.bfhtimezones=c,this},a(document).ready(function(){a("form select.bfh-timezones, div.bfh-timezones").each(function(){var b;b=a(this),b.hasClass("bfh-selectbox")&&b.bfhselectbox(b.data()),b.bfhtimezones(b.data())})})}(window.jQuery); -/** - * @preserve - * Project: Bootstrap Hover Dropdown - * Author: Cameron Spear - * Version: v2.2.1 - * Contributors: Mattia Larentis - * Dependencies: Bootstrap's Dropdown plugin, jQuery - * Description: A simple plugin to enable Bootstrap dropdowns to active on hover and provide a nice user experience. - * License: MIT - * Homepage: http://cameronspear.com/blog/bootstrap-dropdown-on-hover-plugin/ - */ -;(function ($, window, undefined) { - // outside the scope of the jQuery plugin to - // keep track of all dropdowns - var $allDropdowns = $(); - - // if instantlyCloseOthers is true, then it will instantly - // shut other nav items when a new one is hovered over - $.fn.dropdownHover = function (options) { - // don't do anything if touch is supported - // (plugin causes some issues on mobile) - if('ontouchstart' in document) return this; // don't want to affect chaining - - // the element we really care about - // is the dropdown-toggle's parent - $allDropdowns = $allDropdowns.add(this.parent()); - - return this.each(function () { - var $this = $(this), - $parent = $this.parent(), - defaults = { - delay: 500, - hoverDelay: 0, - instantlyCloseOthers: true - }, - data = { - delay: $(this).data('delay'), - hoverDelay: $(this).data('hover-delay'), - instantlyCloseOthers: $(this).data('close-others') - }, - showEvent = 'show.bs.dropdown', - hideEvent = 'hide.bs.dropdown', - // shownEvent = 'shown.bs.dropdown', - // hiddenEvent = 'hidden.bs.dropdown', - settings = $.extend(true, {}, defaults, options, data), - timeout, timeoutHover; - - $parent.hover(function (event) { - // so a neighbor can't open the dropdown - if(!$parent.hasClass('open') && !$this.is(event.target)) { - // stop this event, stop executing any code - // in this callback but continue to propagate - return true; - } - - openDropdown(event); - }, function () { - // clear timer for hover event - window.clearTimeout(timeoutHover) - timeout = window.setTimeout(function () { - $this.attr('aria-expanded', 'false'); - $parent.removeClass('open'); - $this.trigger(hideEvent); - }, settings.delay); - }); - - // this helps with button groups! - $this.hover(function (event) { - // this helps prevent a double event from firing. 
- // see https://github.com/CWSpear/bootstrap-hover-dropdown/issues/55 - if(!$parent.hasClass('open') && !$parent.is(event.target)) { - // stop this event, stop executing any code - // in this callback but continue to propagate - return true; - } - - openDropdown(event); - }); - - // handle submenus - $parent.find('.dropdown-submenu').each(function (){ - var $this = $(this); - var subTimeout; - $this.hover(function () { - window.clearTimeout(subTimeout); - $this.children('.dropdown-menu').show(); - // always close submenu siblings instantly - $this.siblings().children('.dropdown-menu').hide(); - }, function () { - var $submenu = $this.children('.dropdown-menu'); - subTimeout = window.setTimeout(function () { - $submenu.hide(); - }, settings.delay); - }); - }); - - function openDropdown(event) { - if($this.parents(".navbar").find(".navbar-toggle").is(":visible")) { - // If we're inside a navbar, don't do anything when the - // navbar is collapsed, as it makes the navbar pretty unusable. - return; - } - - // clear dropdown timeout here so it doesnt close before it should - window.clearTimeout(timeout); - // restart hover timer - window.clearTimeout(timeoutHover); - - // delay for hover event. - timeoutHover = window.setTimeout(function () { - $allDropdowns.find(':focus').blur(); - - if(settings.instantlyCloseOthers === true) - $allDropdowns.removeClass('open'); - - // clear timer for hover event - window.clearTimeout(timeoutHover); - $this.attr('aria-expanded', 'true'); - $parent.addClass('open'); - $this.trigger(showEvent); - }, settings.hoverDelay); - } - }); - }; - - $(document).ready(function () { - // apply dropdownHover to all elements with the data-hover="dropdown" attribute - $('[data-hover="dropdown"]').dropdownHover(); - }); -})(jQuery, window); - /* ============================================================= * bootstrap3-typeahead.js v4.0.2 * https://github.com/bassjobsen/Bootstrap-3-Typeahead @@ -10768,7 +10629,7 @@ if(!jQuery)throw new Error("Bootstrap Form Helpers requires jQuery");var BFHCoun })); /** - * matchesSelector v2.0.1 + * matchesSelector v2.0.2 * matchesSelector( element, '.selector' ) * MIT license */ @@ -10794,7 +10655,7 @@ if(!jQuery)throw new Error("Bootstrap Form Helpers requires jQuery");var BFHCoun 'use strict'; var matchesMethod = ( function() { - var ElemProto = Element.prototype; + var ElemProto = window.Element.prototype; // check for the standard method name first if ( ElemProto.matches ) { return 'matches'; @@ -10822,7 +10683,7 @@ if(!jQuery)throw new Error("Bootstrap Form Helpers requires jQuery");var BFHCoun })); /** - * Fizzy UI utils v2.0.3 + * Fizzy UI utils v2.0.4 * MIT license */ @@ -10883,7 +10744,8 @@ utils.makeArray = function( obj ) { if ( Array.isArray( obj ) ) { // use object if already an array ary = obj; - } else if ( obj && typeof obj.length == 'number' ) { + } else if ( obj && typeof obj == 'object' && + typeof obj.length == 'number' ) { // convert nodeList to array for ( var i=0; i < obj.length; i++ ) { ary.push( obj[i] ); @@ -13271,9 +13133,15 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e /* Given an array of various sizes of the same image and a container width, * return the best image. 
*/ - var selectBest = function (containerWidth, imageSizes) { + var selectBest = function (containerWidth, containerHeight, imageSizes) { var devicePixelRatio = window.devicePixelRatio || 1; + var deviceOrientation = getDeviceOrientation(); + var windowOrientation = getWindowOrientation(); + var wrapperOrientation = (containerHeight > containerWidth) ? + 'portrait' : + (containerWidth > containerHeight ? 'landscape' : 'square'); + var lastAllowedImage = 0; var testWidth; @@ -13292,6 +13160,24 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e continue; } + if (image.deviceOrientation && image.deviceOrientation !== deviceOrientation) { + // We disallowed choosing this image for current device orientation, + // So skip this one. + continue; + } + + if (image.windowOrientation && image.windowOrientation !== deviceOrientation) { + // We disallowed choosing this image for current window orientation, + // So skip this one. + continue; + } + + if (image.orientation && image.orientation !== wrapperOrientation) { + // We disallowed choosing this image for current element's orientation, + // So skip this one. + continue; + } + // Mark this one as the last one we investigated // which does not violate device pixel ratio rules. // We may choose this one later if there's no match. @@ -13352,7 +13238,7 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e for (var i = 0; i < images.length; i++) { if ($.isArray(images[i])) { images[i] = widthInsertSort(images[i]); - var chosen = selectBest(containerWidth, images[i]); + var chosen = selectBest(containerWidth, containerHeight, images[i]); chosenImages.push(chosen); } else { // In case a new image was pushed in, process it: @@ -13855,8 +13741,8 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e , bgWidth = rootWidth , bgHeight = bgWidth / this.$itemWrapper.data('ratio') , evt = $.Event('backstretch.resize', { -              relatedTarget: this.$container[0] -            }) + relatedTarget: this.$container[0] + }) , bgOffset , alignX = this._currentImage.alignX === undefined ? this.options.alignX : this._currentImage.alignX , alignY = this._currentImage.alignY === undefined ? this.options.alignY : this._currentImage.alignY; @@ -13983,7 +13869,7 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e // Now we can clear the background on the element, to spare memory if (!that.options.bypassCss && !that.isBody) { - that.$container.css('background', 'none'); + that.$container.css('background-image', 'none'); } // Trigger the "after" and "show" events @@ -14151,7 +14037,7 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e }; /** - * Video Abstraction Layer + * Video Abstraction Layer * * Static methods: * > VideoWrapper.loadYoutubeAPI() -> Call in order to load the Youtube API. @@ -14510,6 +14396,36 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e }, 50); }; + var getDeviceOrientation = function () { + + if ('matchMedia' in window) { + if (window.matchMedia("(orientation: portrait)").matches) { + return 'portrait'; + } else if (window.matchMedia("(orientation: landscape)").matches) { + return 'landscape'; + } + } + + if (screen.height > screen.width) { + return 'portrait'; + } + + // Even square devices have orientation, + // but a desktop browser may be too old for `matchMedia`. 
+ // Defaulting to `landscape` for the VERY rare case of a square desktop screen is good enough. + return 'landscape'; + }; + + var getWindowOrientation = function () { + if (window.innerHeight > window.innerWidth) { + return 'portrait'; + } + if (window.innerWidth > window.innerHeight) { + return 'landscape'; + } + + return 'square'; + }; /* SUPPORTS FIXED POSITION? * @@ -14712,4789 +14628,5400 @@ return t=a?function(t){return t&&a(r(t))}:function(t){return t&&r(t)}}function e } })(jQuery); -/* - * jQuery Plugin: Tokenizing Autocomplete Text Entry - * Version 1.6.1 - * - * Copyright (c) 2009 James Smith (http://loopj.com) - * Licensed jointly under the GPL and MIT licenses, - * choose which one suits your project best! - * +/*! + * jQuery Form Plugin + * version: 4.0.1 + * Requires jQuery v1.7 or later + * Copyright 2017 Kevin Morris + * Copyright 2006 M. Alsup + * Project repository: https://github.com/jquery-form/form + * Dual licensed under the MIT and LGPLv3 licenses. + * https://github.com/jquery-form/form#license */ -;(function ($) { +/*global ActiveXObject */ - // Default settings - var DEFAULT_SETTINGS = { - // Search settings - method: "GET", - queryParam: "q", - searchDelay: 300, - minChars: 1, - propertyToSearch: "name", - jsonContainer: null, - contentType: "json", - excludeCurrent: false, - excludeCurrentParameter: "x", +// AMD support +(function (factory) { + "use strict"; + if (typeof define === 'function' && define.amd) { + // using AMD; register as anon module + define(['jquery'], factory); + } if (typeof module !== 'undefined') { + factory(require('jquery')); + } else { + // no AMD; invoke directly + factory( (typeof(jQuery) != 'undefined') ? jQuery : window.Zepto ); + } +} - // Prepopulation settings - prePopulate: null, - processPrePopulate: false, +(function($) { +"use strict"; - // Display settings - hintText: "Type in a search term", - noResultsText: "No results", - searchingText: "Searching...", - deleteText: "×", - animateDropdown: true, - placeholder: null, - theme: null, - zindex: 999, - resultsLimit: null, +/* + Usage Note: + ----------- + Do not use both ajaxSubmit and ajaxForm on the same form. These + functions are mutually exclusive. Use ajaxSubmit if you want + to bind your own submit handler to the form. For example, + + $(document).ready(function() { + $('#myForm').on('submit', function(e) { + e.preventDefault(); // <-- important + $(this).ajaxSubmit({ + target: '#output' + }); + }); + }); - enableHTML: false, + Use ajaxForm when you want the plugin to manage all the event binding + for you. For example, - resultsFormatter: function(item) { - var string = item[this.propertyToSearch]; - return "
" + (this.enableHTML ? string : _escapeHTML(string)) + "
        • "; - }, + $(document).ready(function() { + $('#myForm').ajaxForm({ + target: '#output' + }); + }); - tokenFormatter: function(item) { - var string = item[this.propertyToSearch]; - return "
" + (this.enableHTML ? string : _escapeHTML(string)) + "
        • "; - }, + You can also use ajaxForm with delegation (requires jQuery v1.7+), so the + form does not have to exist when you invoke ajaxForm: - // Tokenization settings - tokenLimit: null, - tokenDelimiter: ",", - preventDuplicates: false, - tokenValue: "id", + $('#myForm').ajaxForm({ + delegation: true, + target: '#output' + }); - // Behavioral settings - allowFreeTagging: false, - allowTabOut: false, - autoSelectFirstResult: false, + When using ajaxForm, the ajaxSubmit function will be invoked for you + at the appropriate time. +*/ - // Callbacks - onResult: null, - onCachedResult: null, - onAdd: null, - onFreeTaggingAdd: null, - onDelete: null, - onReady: null, +/** + * Feature detection + */ +var feature = {}; +feature.fileapi = $("").get(0).files !== undefined; +feature.formdata = window.FormData !== undefined; + +var hasProp = !!$.fn.prop; + +// attr2 uses prop when it can but checks the return type for +// an expected string. This accounts for the case where a form +// contains inputs with names like "action" or "method"; in those +// cases "prop" returns the element +$.fn.attr2 = function() { + if ( ! hasProp ) { + return this.attr.apply(this, arguments); + } + var val = this.prop.apply(this, arguments); + if ( ( val && val.jquery ) || typeof val === 'string' ) { + return val; + } + return this.attr.apply(this, arguments); +}; - // Other settings - idPrefix: "token-input-", +/** + * ajaxSubmit() provides a mechanism for immediately submitting + * an HTML form using AJAX. + * + * @param object|string options jquery.form.js parameters or custom url for submission + * @param object data extraData + * @param string dataType ajax dataType + * @param function onSuccess ajax success callback function + */ +$.fn.ajaxSubmit = function(options, data, dataType, onSuccess) { + /*jshint scripturl:true */ - // Keep track if the input is currently in disabled mode - disabled: false - }; + // fast fail if nothing selected (http://dev.jquery.com/ticket/2752) + if (!this.length) { + log('ajaxSubmit: skipping submit process - no element selected'); + return this; + } - // Default classes to use when theming - var DEFAULT_CLASSES = { - tokenList: "token-input-list", - token: "token-input-token", - tokenReadOnly: "token-input-token-readonly", - tokenDelete: "token-input-delete-token", - selectedToken: "token-input-selected-token", - highlightedToken: "token-input-highlighted-token", - dropdown: "token-input-dropdown", - dropdownItem: "token-input-dropdown-item", - dropdownItem2: "token-input-dropdown-item2", - selectedDropdownItem: "token-input-selected-dropdown-item", - inputToken: "token-input-input-token", - focused: "token-input-focused", - disabled: "token-input-disabled" - }; + var method, action, url, $form = this; - // Input box position "enum" - var POSITION = { - BEFORE: 0, - AFTER: 1, - END: 2 - }; + if (typeof options == 'function') { + options = { success: options }; - // Keys "enum" - var KEY = { - BACKSPACE: 8, - TAB: 9, - ENTER: 13, - ESCAPE: 27, - SPACE: 32, - PAGE_UP: 33, - PAGE_DOWN: 34, - END: 35, - HOME: 36, - LEFT: 37, - UP: 38, - RIGHT: 39, - DOWN: 40, - NUMPAD_ENTER: 108, - COMMA: 188 - }; + } else if ( typeof options == 'string' || ( options === false && arguments.length > 0 ) ) { + options = { + 'url' : options, + 'data' : data, + 'dataType' : dataType + }; - var HTML_ESCAPES = { - '&': '&', - '<': '<', - '>': '>', - '"': '"', - "'": ''', - '/': '/' - }; + if(typeof onSuccess == 'function') + { + options.success = onSuccess; + } - var HTML_ESCAPE_CHARS = /[&<>"'\/]/g; 
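The attr2() helper added above exists because a form control named "action" or "method" shadows the form element's own property, so .prop() can hand back an element where a string is expected. A quick illustration of that gotcha (hypothetical markup, not taken from this patch; assumes jQuery, and jquery.form.js for the attr2 call):

// Hypothetical form whose "action" input shadows form.action in most browsers.
var $form = $('<form action="/search"><input name="action" value="x"></form>');

console.log($form.prop('action'));   // the <input> element: the named control shadows form.action
console.log($form.attr('action'));   // "/search", the attribute is still a plain string
console.log($form.attr2('action'));  // "/search", attr2 tries prop, sees a non-string and falls back to attr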
+ } else if ( options === undefined ) { + options = {}; + } - function coerceToString(val) { - return String((val === null || val === undefined) ? '' : val); - } + method = options.type || this.attr2('method'); + action = options.url || this.attr2('action'); - function _escapeHTML(text) { - return coerceToString(text).replace(HTML_ESCAPE_CHARS, function(match) { - return HTML_ESCAPES[match]; - }); - } + url = (typeof action === 'string') ? $.trim(action) : ''; + url = url || window.location.href || ''; + if (url) { + // clean url (don't include hash vaue) + url = (url.match(/^([^#]+)/)||[])[1]; + } - // Additional public (exposed) methods - var methods = { - init: function(url_or_data_or_function, options) { - var settings = $.extend({}, DEFAULT_SETTINGS, options || {}); + options = $.extend(true, { + url: url, + success: $.ajaxSettings.success, + type: method || $.ajaxSettings.type, + iframeSrc: /^https/i.test(window.location.href || '') ? 'javascript:false' : 'about:blank' + }, options); + + // hook for manipulating the form data before it is extracted; + // convenient for use with rich editors like tinyMCE or FCKEditor + var veto = {}; + this.trigger('form-pre-serialize', [this, options, veto]); + if (veto.veto) { + log('ajaxSubmit: submit vetoed via form-pre-serialize trigger'); + return this; + } - return this.each(function () { - $(this).data("settings", settings); - $(this).data("tokenInputObject", new $.TokenList(this, url_or_data_or_function, settings)); - }); - }, - clear: function() { - this.data("tokenInputObject").clear(); - return this; - }, - add: function(item) { - this.data("tokenInputObject").add(item); - return this; - }, - remove: function(item) { - this.data("tokenInputObject").remove(item); - return this; - }, - get: function() { - return this.data("tokenInputObject").getTokens(); - }, - toggleDisabled: function(disable) { - this.data("tokenInputObject").toggleDisabled(disable); - return this; - }, - setOptions: function(options){ - $(this).data("settings", $.extend({}, $(this).data("settings"), options || {})); - return this; - }, - destroy: function () { - if (this.data("tokenInputObject")) { - this.data("tokenInputObject").clear(); - var tmpInput = this; - var closest = this.parent(); - closest.empty(); - tmpInput.show(); - closest.append(tmpInput); - return tmpInput; - } - } - }; + // provide opportunity to alter form data before it is serialized + if (options.beforeSerialize && options.beforeSerialize(this, options) === false) { + log('ajaxSubmit: submit aborted via beforeSerialize callback'); + return this; + } - // Expose the .tokenInput function to jQuery as a plugin - $.fn.tokenInput = function (method) { - // Method calling and initialization logic - if(methods[method]) { - return methods[method].apply(this, Array.prototype.slice.call(arguments, 1)); - } else { - return methods.init.apply(this, arguments); - } - }; + var traditional = options.traditional; + if ( traditional === undefined ) { + traditional = $.ajaxSettings.traditional; + } - // TokenList class for each input - $.TokenList = function (input, url_or_data, settings) { - // - // Initialization - // + var elements = []; + var qx, a = this.formToArray(options.semantic, elements, options.filtering); + if (options.data) { + var optionsData = $.isFunction(options.data) ? 
options.data(a) : options.data; + options.extraData = optionsData; + qx = $.param(optionsData, traditional); + } - // Configure the data source - if (typeof(url_or_data) === "string" || typeof(url_or_data) === "function") { - // Set the url to query against - $(input).data("settings").url = url_or_data; + // give pre-submit callback an opportunity to abort the submit + if (options.beforeSubmit && options.beforeSubmit(a, this, options) === false) { + log('ajaxSubmit: submit aborted via beforeSubmit callback'); + return this; + } - // If the URL is a function, evaluate it here to do our initalization work - var url = computeURL(); + // fire vetoable 'validate' event + this.trigger('form-submit-validate', [a, this, options, veto]); + if (veto.veto) { + log('ajaxSubmit: submit vetoed via form-submit-validate trigger'); + return this; + } - // Make a smart guess about cross-domain if it wasn't explicitly specified - if ($(input).data("settings").crossDomain === undefined && typeof url === "string") { - if(url.indexOf("://") === -1) { - $(input).data("settings").crossDomain = false; - } else { - $(input).data("settings").crossDomain = (location.href.split(/\/+/g)[1] !== url.split(/\/+/g)[1]); - } - } - } else if (typeof(url_or_data) === "object") { - // Set the local data to search through - $(input).data("settings").local_data = url_or_data; - } + var q = $.param(a, traditional); + if (qx) { + q = ( q ? (q + '&' + qx) : qx ); + } + if (options.type.toUpperCase() == 'GET') { + options.url += (options.url.indexOf('?') >= 0 ? '&' : '?') + q; + options.data = null; // data is null for 'get' + } + else { + options.data = q; // data is the query string for 'post' + } - // Build class names - if($(input).data("settings").classes) { - // Use custom class names - $(input).data("settings").classes = $.extend({}, DEFAULT_CLASSES, $(input).data("settings").classes); - } else if($(input).data("settings").theme) { - // Use theme-suffixed default class names - $(input).data("settings").classes = {}; - $.each(DEFAULT_CLASSES, function(key, value) { - $(input).data("settings").classes[key] = value + "-" + $(input).data("settings").theme; - }); - } else { - $(input).data("settings").classes = DEFAULT_CLASSES; - } + var callbacks = []; + if (options.resetForm) { + callbacks.push(function() { $form.resetForm(); }); + } + if (options.clearForm) { + callbacks.push(function() { $form.clearForm(options.includeHidden); }); + } - // Save the tokens - var saved_tokens = []; + // perform a load on the target only if dataType is not provided + if (!options.dataType && options.target) { + var oldSuccess = options.success || function(){}; + callbacks.push(function(data) { + var fn = options.replaceTarget ? 
'replaceWith' : 'html'; + $(options.target)[fn](data).each(oldSuccess, arguments); + }); + } + else if (options.success) { + if ($.isArray(options.success)) { + $.merge(callbacks, options.success); + } else { + callbacks.push(options.success); + } + } - // Keep track of the number of tokens in the list - var token_count = 0; + options.success = function(data, status, xhr) { // jQuery 1.4+ passes xhr as 3rd arg + var context = options.context || this ; // jQuery 1.4+ supports scope context + for (var i=0, max=callbacks.length; i < max; i++) { + callbacks[i].apply(context, [data, status, xhr || $form, $form]); + } + }; - // Basic cache to save on db hits - var cache = new $.TokenList.Cache(); + if (options.error) { + var oldError = options.error; + options.error = function(xhr, status, error) { + var context = options.context || this; + oldError.apply(context, [xhr, status, error, $form]); + }; + } - // Keep track of the timeout, old vals - var timeout; - var input_val; + if (options.complete) { + var oldComplete = options.complete; + options.complete = function(xhr, status) { + var context = options.context || this; + oldComplete.apply(context, [xhr, status, $form]); + }; + } - // Create a new text input an attach keyup events - var input_box = $("") - .css({ - outline: "none" - }) - .attr("id", $(input).data("settings").idPrefix + input.id) - .focus(function () { - if ($(input).data("settings").disabled) { - return false; - } else - if ($(input).data("settings").tokenLimit === null || $(input).data("settings").tokenLimit !== token_count) { - show_dropdown_hint(); - } - token_list.addClass($(input).data("settings").classes.focused); - }) - .blur(function () { - hide_dropdown(); + // are there files to upload? - if ($(input).data("settings").allowFreeTagging) { - add_freetagging_tokens(); - } + // [value] (issue #113), also see comment: + // https://github.com/malsup/form/commit/588306aedba1de01388032d5f42a60159eea9228#commitcomment-2180219 + var fileInputs = $('input[type=file]:enabled', this).filter(function() { return $(this).val() !== ''; }); - $(this).val(""); - token_list.removeClass($(input).data("settings").classes.focused); - }) - .bind("keyup keydown blur update", resize_input) - .keydown(function (event) { - var previous_token; - var next_token; + var hasFileInputs = fileInputs.length > 0; + var mp = 'multipart/form-data'; + var multipart = ($form.attr('enctype') == mp || $form.attr('encoding') == mp); - switch(event.keyCode) { - case KEY.LEFT: - case KEY.RIGHT: - case KEY.UP: - case KEY.DOWN: - if(this.value.length === 0) { - previous_token = input_token.prev(); - next_token = input_token.next(); + var fileAPI = feature.fileapi && feature.formdata; + log("fileAPI :" + fileAPI); + var shouldUseFrame = (hasFileInputs || multipart) && !fileAPI; - if((previous_token.length && previous_token.get(0) === selected_token) || (next_token.length && next_token.get(0) === selected_token)) { - // Check if there is a previous/next token and it is selected - if(event.keyCode === KEY.LEFT || event.keyCode === KEY.UP) { - deselect_token($(selected_token), POSITION.BEFORE); - } else { - deselect_token($(selected_token), POSITION.AFTER); - } - } else if((event.keyCode === KEY.LEFT || event.keyCode === KEY.UP) && previous_token.length) { - // We are moving left, select the previous token if it exists - select_token($(previous_token.get(0))); - } else if((event.keyCode === KEY.RIGHT || event.keyCode === KEY.DOWN) && next_token.length) { - // We are moving right, select the next token if it exists - 
select_token($(next_token.get(0))); - } - } else { - var dropdown_item = null; + var jqxhr; - if (event.keyCode === KEY.DOWN || event.keyCode === KEY.RIGHT) { - dropdown_item = $(dropdown).find('li').first(); + // options.iframe allows user to force iframe mode + // 06-NOV-09: now defaulting to iframe mode if file input is detected + if (options.iframe !== false && (options.iframe || shouldUseFrame)) { + // hack to fix Safari hang (thanks to Tim Molendijk for this) + // see: http://groups.google.com/group/jquery-dev/browse_thread/thread/36395b7ab510dd5d + if (options.closeKeepAlive) { + $.get(options.closeKeepAlive, function() { + jqxhr = fileUploadIframe(a); + }); + } + else { + jqxhr = fileUploadIframe(a); + } + } + else if ((hasFileInputs || multipart) && fileAPI) { + jqxhr = fileUploadXhr(a); + } + else { + jqxhr = $.ajax(options); + } - if (selected_dropdown_item) { - dropdown_item = $(selected_dropdown_item).next(); - } - } else { - dropdown_item = $(dropdown).find('li').last(); + $form.removeData('jqxhr').data('jqxhr', jqxhr); - if (selected_dropdown_item) { - dropdown_item = $(selected_dropdown_item).prev(); - } - } + // clear element array + for (var k=0; k < elements.length; k++) { + elements[k] = null; + } - select_dropdown_item(dropdown_item); - } + // fire 'notify' event + this.trigger('form-submit-notify', [this, options]); + return this; - break; + // utility fn for deep serialization + function deepSerialize(extraData){ + var serialized = $.param(extraData, options.traditional).split('&'); + var len = serialized.length; + var result = []; + var i, part; + for (i=0; i < len; i++) { + part = serialized[i].split('='); + // #278; use array instead of object storage, favoring array serializations + result.push([decodeURIComponent(part[0]), decodeURIComponent(part[1])]); + } + return result; + } - case KEY.BACKSPACE: - previous_token = input_token.prev(); + // XMLHttpRequest Level 2 file uploads (big hat tip to francois2metz) + function fileUploadXhr(a) { + var formdata = new FormData(); - if (this.value.length === 0) { - if (selected_token) { - delete_token($(selected_token)); - hidden_input.change(); - } else if(previous_token.length) { - select_token($(previous_token.get(0))); - } + for (var i=0; i < a.length; i++) { + formdata.append(a[i].name, a[i].value); + } - return false; - } else if($(this).val().length === 1) { - hide_dropdown(); - } else { - // set a timeout just long enough to let this function finish. 
- setTimeout(function(){ do_search(); }, 5); - } - break; + if (options.extraData) { + var serializedData = deepSerialize(options.extraData); + for (i=0; i < serializedData.length; i++) { + if (serializedData[i]) { + formdata.append(serializedData[i][0], serializedData[i][1]); + } + } + } - case KEY.TAB: - case KEY.ENTER: - case KEY.NUMPAD_ENTER: - case KEY.COMMA: - if(selected_dropdown_item) { - add_token($(selected_dropdown_item).data("tokeninput")); - hidden_input.change(); - } else { - if ($(input).data("settings").allowFreeTagging) { - if($(input).data("settings").allowTabOut && $(this).val() === "") { - return true; - } else { - add_freetagging_tokens(); - } - } else { - $(this).val(""); - if($(input).data("settings").allowTabOut) { - return true; - } - } - event.stopPropagation(); - event.preventDefault(); - } - return false; + options.data = null; - case KEY.ESCAPE: - hide_dropdown(); - return true; + var s = $.extend(true, {}, $.ajaxSettings, options, { + contentType: false, + processData: false, + cache: false, + type: method || 'POST' + }); - default: - if (String.fromCharCode(event.which)) { - // set a timeout just long enough to let this function finish. - setTimeout(function(){ do_search(); }, 5); - } - break; - } - }); + if (options.uploadProgress) { + // workaround because jqXHR does not expose upload property + s.xhr = function() { + var xhr = $.ajaxSettings.xhr(); + if (xhr.upload) { + xhr.upload.addEventListener('progress', function(event) { + var percent = 0; + var position = event.loaded || event.position; /*event.position is deprecated*/ + var total = event.total; + if (event.lengthComputable) { + percent = Math.ceil(position / total * 100); + } + options.uploadProgress(event, position, total, percent); + }, false); + } + return xhr; + }; + } - // Keep reference for placeholder - if (settings.placeholder) { - input_box.attr("placeholder", settings.placeholder); - } + s.data = null; + var beforeSend = s.beforeSend; + s.beforeSend = function(xhr, o) { + //Send FormData() provided by user + if (options.formData) { + o.data = options.formData; + } + else { + o.data = formdata; + } + if(beforeSend) { + beforeSend.call(this, xhr, o); + } + }; + return $.ajax(s); + } - // Keep a reference to the original input box - var hidden_input = $(input) - .hide() - .val("") - .focus(function () { - focus_with_timeout(input_box); - }) - .blur(function () { - input_box.blur(); + // private function for handling file uploads (hat tip to YAHOO!) + function fileUploadIframe(a) { + var form = $form[0], el, i, s, g, id, $io, io, xhr, sub, n, timedOut, timeoutHandle; + var deferred = $.Deferred(); - //return the object to this can be referenced in the callback functions. - return hidden_input; - }) - ; + // #341 + deferred.abort = function(status) { + xhr.abort(status); + }; - // Keep a reference to the selected token and dropdown item - var selected_token = null; - var selected_token_index = 0; - var selected_dropdown_item = null; + if (a) { + // ensure that every serialized input is still enabled + for (i=0; i < elements.length; i++) { + el = $(elements[i]); + if ( hasProp ) { + el.prop('disabled', false); + } + else { + el.removeAttr('disabled'); + } + } + } - // The list to store the token items in - var token_list = $("
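The rest of the new jquery.form.js code above wires these options together (beforeSubmit validation, GET vs. POST serialization, FormData uploads with progress events, iframe fallback). A minimal usage sketch; the selector, endpoint and handlers are invented for illustration rather than taken from this patch:

// Assumes jQuery and jquery.form.js are loaded; #upload-form is a hypothetical form.
$(document).ready(function () {
    $('#upload-form').ajaxForm({
        url: '/api/upload',        // hypothetical endpoint
        type: 'POST',
        dataType: 'json',
        resetForm: true,           // clear the fields after a successful submit
        beforeSubmit: function (fields, $form, options) {
            // fields is the serialized name/value array; returning false aborts the submit
            return fields.length > 0;
        },
        uploadProgress: function (event, position, total, percent) {
            // only fires on the XMLHttpRequest Level 2 (FormData) path
            console.log('upload ' + percent + '% complete');
        },
        success: function (data, status, xhr, $form) {
            console.log('server response', data);
        },
        error: function (xhr) {
            console.log('submit failed with HTTP ' + xhr.status);
        }
    });
});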
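Further up in this bundle, jquery-backstretch's selectBest() gains per-image orientation hints (deviceOrientation, windowOrientation, orientation) on top of the existing width-based choice. A standalone sketch of that selection idea, with each hint checked against its own current measurement; the property names follow the diff, while the function and sample data are invented for illustration:

// Not the plugin's code: a simplified model of orientation-aware image selection.
function pickImage(container, candidates) {
    var dpr = window.devicePixelRatio || 1;
    var windowOrientation = window.innerHeight > window.innerWidth ? 'portrait'
        : window.innerWidth > window.innerHeight ? 'landscape' : 'square';
    var wrapperOrientation = container.height > container.width ? 'portrait'
        : container.width > container.height ? 'landscape' : 'square';

    // Drop candidates whose hints contradict the current window/wrapper orientation
    // (deviceOrientation would be matched the same way, via matchMedia as above),
    // then order what is left by width.
    var allowed = candidates.filter(function (image) {
        return (!image.windowOrientation || image.windowOrientation === windowOrientation) &&
               (!image.orientation || image.orientation === wrapperOrientation);
    }).sort(function (a, b) { return a.width - b.width; });

    // Smallest image that still covers the container at this pixel density,
    // otherwise the widest one that was allowed.
    for (var i = 0; i < allowed.length; i++) {
        if (allowed[i].width >= container.width * dpr) {
            return allowed[i];
        }
    }
    return allowed[allowed.length - 1] || null;
}

// Example:
// pickImage({ width: 1280, height: 720 }, [
//     { url: 'bg-720-portrait.jpg', width: 720, orientation: 'portrait' },
//     { url: 'bg-1280.jpg', width: 1280 },
//     { url: 'bg-1920.jpg', width: 1920 }
// ]);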
          diff --git a/views/config_providers.mako b/views/config_providers.mako index cf492995da..76cfc6a17f 100644 --- a/views/config_providers.mako +++ b/views/config_providers.mako @@ -84,7 +84,7 @@ $('#config-components').tabs(); ${('!', '')[bool(cur_provider.get_id() not in app.BROKEN_PROVIDERS.split(','))]} - + @@ -161,10 +161,10 @@ $('#config-components').tabs(); % if hasattr(cur_newznab_provider, 'enable_manualsearch'):
          @@ -252,10 +252,10 @@ $('#config-components').tabs(); % if hasattr(cur_nzb_provider, 'enable_manualsearch'):
          @@ -531,10 +531,10 @@ $('#config-components').tabs(); % if hasattr(cur_torrent_provider, 'enable_manualsearch'):
          @@ -716,7 +716,7 @@ $('#config-components').tabs();
          -
          +
          @@ -637,6 +636,21 @@
          +
          + +
          @@ -94,9 +94,8 @@ - +

          ${"Season " + str(epResult["season"]) if int(epResult["season"]) > 0 else "Specials"} - % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): search % endif @@ -139,13 +138,12 @@ - +

          ${"Season " + str(epResult["season"]) if int(epResult["season"]) else "Specials"} % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): search % endif

          -
          % if not app.DISPLAY_ALL_SEASONS: @@ -183,14 +181,14 @@ <% cur_season = int(epResult["season"]) %> % endif - + % if int(epResult["status"]) != UNAIRED: % endif - ${( - ${( - + ${( + ${( + <% text = str(epResult['episode']) if epLoc != '' and epLoc is not None: @@ -202,8 +200,8 @@ %> ${text} - ${epResult["absolute_number"]} - + ${epResult["absolute_number"]} + - + - + % if epResult["description"] != "" and epResult["description"] is not None: % else: @@ -235,13 +233,13 @@ % endif ${epResult["name"]} - ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} - + ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} + % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} % endif - + % if int(epResult['airdate']) != 1: ## Lets do this exactly like ComingEpisodes and History ## Avoid issues with dateutil's _isdst on Windows but still provide air dates @@ -254,7 +252,7 @@ Never % endif - + % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: <% filename = epResult['location'] @@ -266,7 +264,7 @@ Download % endif - + % for flag in (epResult["subtitles"] or '').split(','): % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: % if flag != 'und': @@ -281,11 +279,11 @@ <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %> % if cur_quality != Quality.NONE: - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % else: - ${statusStrings[cur_status]} + ${statusStrings[cur_status]} % endif - + % if int(epResult["season"]) != 0: % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: retry @@ -302,11 +300,13 @@ % endfor + % if sql_results: Season contains ${epCount} episodes with total filesize: ${pretty_file_size(epSize)} + % endif - +
          diff --git a/views/editShow.mako b/views/editShow.mako index dcf79058a5..bef59044c4 100644 --- a/views/editShow.mako +++ b/views/editShow.mako @@ -20,8 +20,8 @@ % endif <%block name="content"> + - % if not header is UNDEFINED:

          ${header}

          % else: diff --git a/views/errorlogs.mako b/views/errorlogs.mako index 0c028d3c14..c1352fd27d 100644 --- a/views/errorlogs.mako +++ b/views/errorlogs.mako @@ -13,7 +13,9 @@ pre { <%block name="content"> + <% + from mako.filters import html_escape if logLevel == logger.WARNING: errors = classes.WarningViewer.errors title = 'WARNING logs' @@ -33,14 +35,12 @@ pre {
          -
          +
           % if errors:
          -% for logline in errors[:500]:
          -${logline}
          -% endfor
          +${'\n'.join([html_escape(logline) for logline in errors[:500]])}
           % else:
          -There are no events to display.
          +There are no events to display.
           % endif
           
          diff --git a/views/history.mako b/views/history.mako index 28241282b0..261311cc11 100644 --- a/views/history.mako +++ b/views/history.mako @@ -16,7 +16,8 @@ <%block name="content"> <%namespace file="/inc_defs.mako" import="renderQualityPill"/> - + +
          % if not header is UNDEFINED: @@ -77,13 +78,13 @@ % for hItem in historyResults: <% composite = Quality.split_composite_status(int(hItem.action)) %> - + <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %> <% isoDate = datetime.strptime(str(hItem.date), History.date_format).isoformat('T') %> - ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} - + ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} + % if composite.status == SUBTITLED: % endif @@ -95,7 +96,7 @@ % endif - + % if composite.status in [DOWNLOADED, ARCHIVED]: % if hItem.provider != "-1": ${hItem.provider} @@ -118,7 +119,7 @@ % endif ${composite.quality} - ${renderQualityPill(composite.quality)} + ${renderQualityPill(composite.quality)} % endfor @@ -145,16 +146,16 @@ % for hItem in compactResults: - + <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.actions[0].date), History.date_format), show_seconds=True) %> <% isoDate = datetime.strptime(str(hItem.actions[0].date), History.date_format).isoformat('T') %> - + <% proper_tags = [action.proper_tags for action in hItem.actions if action.proper_tags] %> ${hItem.show_name} - ${"S%02i" % int(hItem.index.season)}${"E%02i" % int(hItem.index.episode)} ${'Proper' if proper_tags else ''} - + % for cur_action in sorted(hItem.actions, key=lambda x: x.date): <% composite = Quality.split_composite_status(int(cur_action.action)) %> % if composite.status == SNATCHED: @@ -176,7 +177,7 @@ % endif % endfor - + % for cur_action in sorted(hItem.actions): <% composite = Quality.split_composite_status(int(cur_action.action)) %> % if composite.status in [DOWNLOADED, ARCHIVED]: @@ -189,7 +190,7 @@ % endfor % if app.USE_SUBTITLES: - + % for cur_action in sorted(hItem.actions): <% composite = Quality.split_composite_status(int(cur_action.action)) %> % if composite.status == SUBTITLED: @@ -201,7 +202,7 @@ % endfor % endif - ${renderQualityPill(composite.quality)} + ${renderQualityPill(composite.quality)} % endfor diff --git a/views/home.mako b/views/home.mako index cd2988a181..dcd5608285 100644 --- a/views/home.mako +++ b/views/home.mako @@ -12,7 +12,11 @@ <%block name="content"> - +<%! + random_show = choice(app.showList) if app.showList else None +%> + +
          @@ -21,20 +25,19 @@
          Direction: -
          Sort By: - +
          @@ -53,7 +56,18 @@ % else:

          ${title}

          % endif +
          +
          + +
          +
          +
          +
          % if app.HOME_LAYOUT != 'poster': @@ -66,7 +80,7 @@   % endif Layout: - % for curIndexer in indexerApi().indexers.iteritems(): - + % for curIndexer in indexerApi().indexers.items(): + % endfor diff --git a/views/inc_qualityChooser.mako b/views/inc_qualityChooser.mako index 9508d1732f..9d9cb5e2fd 100644 --- a/views/inc_qualityChooser.mako +++ b/views/inc_qualityChooser.mako @@ -39,17 +39,23 @@ selected = None
          -
          +
          Quality setting explanation:
          -
          This will download any of these qualities and then stops searching:
          -
          Downloads any of these qualities:
          -
          But it will stop searching when one of these is downloaded:
          +
          This will download any of these qualities and then stops searching:
          +
          Downloads any of these qualities:
          +
          But it will stop searching when one of these is downloaded:
          -
          +
          diff --git a/views/layouts/main.mako b/views/layouts/main.mako index c6c4bb40cb..1463ef1d5f 100644 --- a/views/layouts/main.mako +++ b/views/layouts/main.mako @@ -38,6 +38,7 @@ } + @@ -62,15 +63,11 @@ <%include file="/partials/footer.mako" />
          - + - - - - - + @@ -111,11 +108,11 @@ + - <%block name="scripts" /> diff --git a/views/manage.mako b/views/manage.mako index 5d50117f66..f825ecb6c2 100644 --- a/views/manage.mako +++ b/views/manage.mako @@ -43,6 +43,7 @@ Scene Anime Season folders + DVD Order Paused Subtitle Default Ep Status @@ -61,7 +62,7 @@ - + @@ -86,25 +87,26 @@ curRemove = "" %> - - ${cur_show.name} - ${renderQualityPill(cur_show.quality, showTitle=True)} - N', 'yes16.png - N', 'yes16.png - N', 'yes16.png - N', 'yes16.png - N', 'yes16.png - N', 'yes16.png - ${statusStrings[cur_show.default_ep_status]} - ${cur_show.status} - ${curUpdate} - ${curRefresh} - ${curRename} + + ${cur_show.name} + ${renderQualityPill(cur_show.quality, showTitle=True)} + N', 'yes16.png + N', 'yes16.png + N', 'yes16.png + N', 'yes16.png + N', 'yes16.png + N', 'yes16.png + N', 'yes16.png + ${statusStrings[cur_show.default_ep_status]} + ${cur_show.status} + ${curUpdate} + ${curRefresh} + ${curRename} % if app.USE_SUBTITLES: - ${curSubtitle} + ${curSubtitle} % endif - ${curDelete} - ${curRemove} + ${curDelete} + ${curRemove} % endfor diff --git a/views/manage_backlogOverview.mako b/views/manage_backlogOverview.mako index 7a9b430dee..f5ec1eed12 100644 --- a/views/manage_backlogOverview.mako +++ b/views/manage_backlogOverview.mako @@ -1,10 +1,8 @@ <%inherit file="/layouts/main.mako"/> <%! from medusa import app - import datetime - from medusa.common import ARCHIVED, DOWNLOADED,Overview, Quality, qualityPresets, statusStrings - from medusa.helper.common import episode_num - from medusa import sbdatetime, network_timezones + from medusa.common import ARCHIVED, DOWNLOADED, Overview, Quality, qualityPresets, statusStrings + from medusa import sbdatetime %> <%block name="scripts"> <%block name="content"> -
          - % if not header is UNDEFINED: -

          ${header}

          - % else: -

          ${title}

          - % endif -
          -
          +
          +
          +% if not header is UNDEFINED: +

          ${header}

          +% else: +

          ${title}

          +% endif +
          +
          +
          +
          +
          @@ -54,7 +58,6 @@ % endfor
          - <%include file="subtitle_modal.mako"/>
          @@ -70,8 +73,14 @@
          +
          +
          +

          -

          Releases waiting minimum ratio

          +
          +
          +

          Releases waiting minimum ratio

          +
          @@ -101,5 +110,7 @@ % endfor
          -
          +
          +
          +
          diff --git a/views/partials/footer.mako b/views/partials/footer.mako index d8480f9adb..f5bf0d27f7 100644 --- a/views/partials/footer.mako +++ b/views/partials/footer.mako @@ -60,3 +60,8 @@ % endif +
          + + + +
          diff --git a/views/partials/home/banner.mako b/views/partials/home/banner.mako index e91f701d04..3227866c2d 100644 --- a/views/partials/home/banner.mako +++ b/views/partials/home/banner.mako @@ -139,13 +139,13 @@ ${cur_show.name} % if cur_show.network: - ${cur_show.network} + ${cur_show.network} ${cur_show.network} % else: No Network @@ -158,7 +158,12 @@ [imdb] % endif - + % if cur_show.externals.get('trakt_id'): + + [trakt] + + % endif + ${indexerApi(cur_show.indexer).name} @@ -175,15 +180,7 @@ ${('No', 'Yes')[bool(paused)]} - <% - display_status = cur_show.status - if None is not display_status: - if re.search(r'(?i)(?:new|returning)\s*series', cur_show.status): - display_status = 'Continuing' - elif re.search('(?i)(?:nded)', cur_show.status): - display_status = 'Ended' - %> - ${display_status} + ${cur_show.status} <% have_xem = bool(get_xem_numbering_for_show(cur_show.indexerid, cur_show.indexer, refresh_data=False)) %> diff --git a/views/partials/home/poster.mako b/views/partials/home/poster.mako index c6d81471e4..098e3b6353 100644 --- a/views/partials/home/poster.mako +++ b/views/partials/home/poster.mako @@ -84,7 +84,7 @@
          - +