chore: refactor code quality issues

Aksh Gupta 2021-03-10 13:07:43 +05:30
parent debd382837
commit 79858975a9
13 changed files with 19 additions and 18 deletions

View file

@@ -729,10 +729,10 @@ class Results(object):
     def _get_indexed_flex_attrs(self):
         """ Index flexible attributes by the entity id they belong to
         """
-        flex_values = dict()
+        flex_values = {}
         for row in self.flex_rows:
             if row['entity_id'] not in flex_values:
-                flex_values[row['entity_id']] = dict()
+                flex_values[row['entity_id']] = {}
             flex_values[row['entity_id']][row['key']] = row['value']

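A note on the pattern: the dict() → {} substitution recurs throughout this commit. Both expressions build an empty dictionary, but the literal is resolved at compile time, skips the global lookup and call of the dict builtin, and is unaffected by a rebound dict name. A rough standalone comparison using timeit (not part of the commit; timings vary by interpreter):

    import timeit

    # Both statements create an empty dict; the literal avoids looking up
    # and calling the `dict` builtin on every execution.
    print("dict():", timeit.timeit("dict()", number=1000000))
    print("{}    :", timeit.timeit("{}", number=1000000))
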
View file

@@ -187,7 +187,7 @@ class ImportSession(object):
         self.logger = self._setup_logging(loghandler)
         self.paths = paths
         self.query = query
-        self._is_resuming = dict()
+        self._is_resuming = {}
         self._merged_items = set()
         self._merged_dirs = set()

View file

@@ -791,7 +791,7 @@ def _store_dict(option, opt_str, value, parser):
     if option_values is None:
         # This is the first supplied ``key=value`` pair of option.
         # Initialize empty dictionary and get a reference to it.
-        setattr(parser.values, dest, dict())
+        setattr(parser.values, dest, {})
         option_values = getattr(parser.values, dest)

     try:

View file

@@ -21,6 +21,7 @@ use of the wide range of MPD clients.
 from __future__ import division, absolute_import, print_function

 import re
+import sys
 from string import Template
 import traceback
 import random
@@ -334,7 +335,7 @@ class BaseServer(object):
     def cmd_kill(self, conn):
         """Exits the server process."""
-        exit(0)
+        sys.exit(0)

     def cmd_close(self, conn):
         """Closes the connection."""

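Why the bare exit(0) is replaced: exit and quit are convenience names injected by the site module for interactive sessions and are not guaranteed to exist (for example under python -S or in frozen executables), whereas sys.exit is the documented API and simply raises SystemExit, which is why the hunk above also adds import sys. A minimal standalone sketch (the shut_down helper is invented for illustration):

    import sys

    def shut_down(code=0):
        # sys.exit raises SystemExit, which unwinds the stack and can be
        # intercepted if cleanup or logging is still needed.
        sys.exit(code)

    try:
        shut_down(0)
    except SystemExit as exc:
        print("exiting with status", exc.code)
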
View file

@@ -33,7 +33,7 @@ from beetsplug.info import make_key_filter, library_data, tag_data
 class ExportEncoder(json.JSONEncoder):
     """Deals with dates because JSON doesn't have a standard"""

     def default(self, o):
-        if isinstance(o, datetime) or isinstance(o, date):
+        if isinstance(o, (datetime, date)):
             return o.isoformat()
         return json.JSONEncoder.default(self, o)

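isinstance accepts a tuple of classes, so the new single call is equivalent to the old chained or. A self-contained sketch of the same encoder pattern (DateEncoder is an illustrative name, not the plugin's class):

    import json
    from datetime import date, datetime

    class DateEncoder(json.JSONEncoder):
        """Encode date and datetime values as ISO 8601 strings."""
        def default(self, o):
            # One isinstance call covers both types via the tuple.
            if isinstance(o, (datetime, date)):
                return o.isoformat()
            return json.JSONEncoder.default(self, o)

    print(json.dumps({"added": datetime(2021, 3, 10, 13, 7)}, cls=DateEncoder))
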
View file

@@ -504,7 +504,7 @@ class FanartTV(RemoteArtSource):
         matches = []
         # can there be more than one releasegroupid per response?
-        for mbid, art in data.get(u'albums', dict()).items():
+        for mbid, art in data.get(u'albums', {}).items():
             # there might be more art referenced, e.g. cdart, and an albumcover
             # might not be present, even if the request was successful
             if album.mb_releasegroupid == mbid and u'albumcover' in art:

View file

@@ -110,7 +110,7 @@ class FishPlugin(BeetsPlugin):
         # Collect commands, their aliases, and their help text
         cmd_names_help = []
         for cmd in beetcmds:
-            names = [alias for alias in cmd.aliases]
+            names = list(cmd.aliases)
             names.append(cmd.name)
             for name in names:
                 cmd_names_help.append((name, cmd.help))
@@ -238,7 +238,7 @@ def get_all_commands(beetcmds):
     # Formatting for Fish to complete command options
     word = ""
     for cmd in beetcmds:
-        names = [alias for alias in cmd.aliases]
+        names = list(cmd.aliases)
         names.append(cmd.name)
         for name in names:
             name = _escape(name)

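[alias for alias in cmd.aliases] is an identity comprehension; list(cmd.aliases) produces the same shallow copy and states the intent directly. A small standalone sketch of the equivalence (the Command stand-in is invented for the example, not the beets Subcommand class):

    from collections import namedtuple

    # Minimal stand-in for a command object with a name and aliases.
    Command = namedtuple("Command", "name aliases")
    cmd = Command(name="list", aliases=("ls",))

    names = list(cmd.aliases)   # same result as [alias for alias in cmd.aliases]
    names.append(cmd.name)
    print(names)                # ['ls', 'list']
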
View file

@@ -27,7 +27,7 @@ class ImportAddedPlugin(BeetsPlugin):
         # album.path for old albums that were replaced by a reimported album
         self.replaced_album_paths = None
         # item path in the library to the mtime of the source file
-        self.item_mtime = dict()
+        self.item_mtime = {}

         register = self.register_listener
         register('import_task_created', self.check_config)

View file

@@ -235,7 +235,7 @@ def make_key_filter(include):
         matchers.append(re.compile(key + '$'))

     def filter_(data):
-        filtered = dict()
+        filtered = {}
         for key, value in data.items():
             if any([m.match(key) for m in matchers]):
                 filtered[key] = value

View file

@@ -123,7 +123,7 @@ class MBSyncPlugin(BeetsPlugin):
         # Map release track and recording MBIDs to their information.
         # Recordings can appear multiple times on a release, so each MBID
         # maps to a list of TrackInfo objects.
-        releasetrack_index = dict()
+        releasetrack_index = {}
         track_index = defaultdict(list)
         for track_info in album_info.tracks:
             releasetrack_index[track_info.release_track_id] = track_info

View file

@@ -216,7 +216,7 @@ class MissingPlugin(BeetsPlugin):
         """Query MusicBrainz to determine items missing from `album`.
         """
         item_mbids = [x.mb_trackid for x in album.items()]
-        if len([i for i in album.items()]) < album.albumtotal:
+        if len(list(album.items())) < album.albumtotal:
             # fetch missing items
             # TODO: Implement caching that without breaking other stuff
             album_info = hooks.album_for_mbid(album.mb_albumid)

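len(list(album.items())) is the direct replacement for the identity comprehension: it materializes the iterable and counts it. When only the count matters and the sequence may be large, a generator sum avoids building the intermediate list; the sketch below shows that alternative purely as an aside, not something this commit does:

    def count_items(items):
        # Counts an iterable without holding all of it in memory,
        # equivalent in result to len(list(items)).
        return sum(1 for _ in items)

    print(count_items(iter(range(12))))   # 12
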
View file

@@ -1139,7 +1139,7 @@ class ReplayGainPlugin(BeetsPlugin):
         tag_vals = self.tag_specific_values(album.items())
         store_track_gain, store_album_gain, target_level, peak = tag_vals

-        discs = dict()
+        discs = {}
         if self.per_disc:
             for item in album.items():
                 if discs.get(item.disc) is None:
@@ -1172,7 +1172,7 @@
             self._apply(
                 self.backend_instance.compute_album_gain, args=(),
                 kwds={
-                    "items": [i for i in items],
+                    "items": list(items),
                     "target_level": target_level,
                     "peak": peak
                 },
@@ -1288,7 +1288,7 @@
         try:
             self._log.info('interrupted')
             self.terminate_pool()
-            exit(0)
+            sys.exit(0)
         except SystemExit:
             # Silence raised SystemExit ~ exit(0)
             pass

View file

@@ -148,7 +148,7 @@ class SubsonicPlaylistPlugin(BeetsPlugin):
     def send(self, endpoint, params=None):
         if params is None:
-            params = dict()
+            params = {}
         a, b = self.generate_token()
         params['u'] = self.config['username']
         params['t'] = a
@@ -163,7 +163,7 @@
         return resp

     def get_playlists(self, ids):
-        output = dict()
+        output = {}
         for playlist_id in ids:
             name, tracks = self.get_playlist(playlist_id)
             for track in tracks:
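
The send() hunk above also shows why the default is None rather than a dict: default argument values are evaluated once at function definition, so a {} default would be shared across calls. A standalone sketch of the difference (both function names are invented for illustration):

    def send_shared(params={}):
        # Pitfall: the same dict object is reused on every call.
        params['calls'] = params.get('calls', 0) + 1
        return params

    def send_fresh(params=None):
        # Safe: a new dict is created for each call when none is passed.
        if params is None:
            params = {}
        params['calls'] = params.get('calls', 0) + 1
        return params

    print(send_shared(), send_shared())   # {'calls': 2} {'calls': 2} - shared state
    print(send_fresh(), send_fresh())     # {'calls': 1} {'calls': 1}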