Mirror of https://github.com/SlavikMIPT/tgcloud.git (synced 2025-02-12 11:12:09 +00:00)

Refactored by Sourcery

This commit is contained in:
parent 15a44135e9
commit f2e38ff496

4 changed files with 72 additions and 71 deletions
@@ -1025,25 +1025,26 @@ class DedupFS(fuse.Fuse): # {{{1
         self.__report_timings()
 
     def __report_timings(self): # {{{3
-        if self.logger.isEnabledFor(logging.DEBUG):
-            timings = [(self.time_spent_traversing_tree, 'Traversing the tree'),
-                       (self.time_spent_caching_nodes, 'Caching tree nodes'),
-                       (self.time_spent_interning, 'Interning path components'),
-                       (self.time_spent_writing_blocks, 'Writing data blocks'),
-                       (self.time_spent_hashing, 'Hashing data blocks'),
-                       (self.time_spent_querying_tree, 'Querying the tree')]
-            maxdescwidth = max([len(l) for t, l in timings]) + 3
-            timings.sort(reverse=True)
-            uptime = time.time() - self.fs_mounted_at
-            printed_heading = False
-            for timespan, description in timings:
-                percentage = timespan / (uptime / 100)
-                if percentage >= 1:
-                    if not printed_heading:
-                        self.logger.debug("Cumulative timings of slowest operations:")
-                        printed_heading = True
-                    self.logger.debug(
-                        " - %-*s%s (%i%%)" % (maxdescwidth, description + ':', format_timespan(timespan), percentage))
+        if not self.logger.isEnabledFor(logging.DEBUG):
+            return
+        timings = [(self.time_spent_traversing_tree, 'Traversing the tree'),
+                   (self.time_spent_caching_nodes, 'Caching tree nodes'),
+                   (self.time_spent_interning, 'Interning path components'),
+                   (self.time_spent_writing_blocks, 'Writing data blocks'),
+                   (self.time_spent_hashing, 'Hashing data blocks'),
+                   (self.time_spent_querying_tree, 'Querying the tree')]
+        maxdescwidth = max(len(l) for t, l in timings) + 3
+        timings.sort(reverse=True)
+        uptime = time.time() - self.fs_mounted_at
+        printed_heading = False
+        for timespan, description in timings:
+            percentage = timespan / (uptime / 100)
+            if percentage >= 1:
+                if not printed_heading:
+                    self.logger.debug("Cumulative timings of slowest operations:")
+                    printed_heading = True
+                self.logger.debug(
+                    " - %-*s%s (%i%%)" % (maxdescwidth, description + ':', format_timespan(timespan), percentage))
 
     def report_disk_usage(self): # {{{3
         disk_usage = self.__fetchval('PRAGMA page_size') * self.__fetchval('PRAGMA page_count')
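The hunk above swaps the `if self.logger.isEnabledFor(logging.DEBUG):` wrapper for an early return, which removes one indentation level from the whole method body without changing behaviour. A minimal standalone sketch of the same early-return pattern (the `report_stats` function and logger name below are hypothetical, not part of the commit):

import logging

logger = logging.getLogger("dedupfs.example")  # hypothetical logger name

def report_stats(timings, uptime):
    # Early return: bail out when DEBUG logging is off instead of
    # wrapping the whole body in an if-block.
    if not logger.isEnabledFor(logging.DEBUG):
        return
    for seconds, label in sorted(timings, reverse=True):
        percentage = seconds / (uptime / 100)
        if percentage >= 1:
            logger.debug("%s took %.1fs (%i%%)", label, seconds, percentage)

logging.basicConfig(level=logging.DEBUG)
report_stats([(1.2, "Hashing data blocks"), (0.3, "Querying the tree")], uptime=60.0)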
@@ -1063,7 +1064,7 @@ class DedupFS(fuse.Fuse): # {{{1
         self.memory_usage = memory_usage
 
     def __report_throughput(self, nbytes=None, nseconds=None, label=None): # {{{3
-        if nbytes == None:
+        if nbytes is None:
             self.bytes_read, self.time_spent_reading = \
                 self.__report_throughput(self.bytes_read, self.time_spent_reading, "read")
             self.bytes_written, self.time_spent_writing = \
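`nbytes == None` becomes `nbytes is None`: `None` is a singleton, so the identity test is the idiomatic (PEP 8 recommended) check, and unlike `==` it cannot be hijacked by a custom `__eq__`. A contrived illustration (the `AlwaysEqual` class is invented for the example):

class AlwaysEqual:
    # Contrived type whose __eq__ answers True for everything.
    def __eq__(self, other):
        return True

value = AlwaysEqual()
print(value == None)   # True  -- misleading result from the overloaded __eq__
print(value is None)   # False -- the identity check cannot be fooled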
@@ -1078,6 +1079,9 @@ class DedupFS(fuse.Fuse): # {{{1
             return nbytes, nseconds
 
     def __report_top_blocks(self): # {{{3
+        if not self.logger.isEnabledFor(logging.DEBUG):
+            return
+        printed_header = False
         query = """
           SELECT * FROM (
             SELECT *, COUNT(*) AS "count" FROM "index"
@@ -1086,21 +1090,19 @@ class DedupFS(fuse.Fuse): # {{{1
             "count" > 1 AND
             hash_id = hashes.id
           LIMIT 10 """
-        if self.logger.isEnabledFor(logging.DEBUG):
-            printed_header = False
-            for row in self.conn.execute(query):
-                if not printed_header:
-                    self.logger.debug("A listing of the most used blocks follows:")
-                    printed_header = True
-                msg = "Block #%s of %s has been used %i times: %r"
-                preview = row['value']
-                max_length = 60
-                if len(preview) < max_length:
-                    preview = str(preview)
-                else:
-                    preview = preview[0: max_length] + '...'
-                nbytes = format_size(len(row['value']))
-                self.logger.debug(msg, row['hash_id'], nbytes, row['count'], preview)
+        for row in self.conn.execute(query):
+            if not printed_header:
+                self.logger.debug("A listing of the most used blocks follows:")
+                printed_header = True
+            msg = "Block #%s of %s has been used %i times: %r"
+            preview = row['value']
+            max_length = 60
+            if len(preview) < max_length:
+                preview = str(preview)
+            else:
+                preview = preview[0: max_length] + '...'
+            nbytes = format_size(len(row['value']))
+            self.logger.debug(msg, row['hash_id'], nbytes, row['count'], preview)
 
     def __gc_hook(self, nested=False): # {{{3
         # Don't collect any garbage for nested calls.
@@ -1116,19 +1118,21 @@ class DedupFS(fuse.Fuse): # {{{1
             self.gc_hook_last_run = time.time()
 
     def __collect_garbage(self): # {{{3
-        if self.gc_enabled and not self.read_only:
-            start_time = time.time()
-            self.logger.info("Performing garbage collection (this might take a while) ..")
-            self.should_vacuum = False
-            for method in self.__collect_strings, self.__collect_inodes, \
-                    self.__collect_indices, self.__collect_blocks, self.__vacuum_metastore:
-                sub_start_time = time.time()
-                msg = method()
-                if msg:
-                    elapsed_time = time.time() - sub_start_time
-                    self.logger.info(msg, format_timespan(elapsed_time))
-            elapsed_time = time.time() - start_time
-            self.logger.info("Finished garbage collection in %s.", format_timespan(elapsed_time))
+        if not self.gc_enabled or self.read_only:
+            return
+
+        start_time = time.time()
+        self.logger.info("Performing garbage collection (this might take a while) ..")
+        self.should_vacuum = False
+        for method in self.__collect_strings, self.__collect_inodes, \
+                self.__collect_indices, self.__collect_blocks, self.__vacuum_metastore:
+            sub_start_time = time.time()
+            msg = method()
+            if msg:
+                elapsed_time = time.time() - sub_start_time
+                self.logger.info(msg, format_timespan(elapsed_time))
+        elapsed_time = time.time() - start_time
+        self.logger.info("Finished garbage collection in %s.", format_timespan(elapsed_time))
 
     def __collect_strings(self): # {{{4
         count = self.conn.execute('DELETE FROM strings WHERE id NOT IN (SELECT name FROM tree)').rowcount
@@ -1150,7 +1154,7 @@ class DedupFS(fuse.Fuse): # {{{1
 
     def __collect_blocks(self): # {{{4
         should_reorganize = False
-        for row in self.conn.execute('SELECT hash FROM hashes WHERE id NOT IN (SELECT hash_id FROM "index")'):
+        for _ in self.conn.execute('SELECT hash FROM hashes WHERE id NOT IN (SELECT hash_id FROM "index")'):
            # del self.blocks[str(row[0])]
            should_reorganize = True
        if should_reorganize:
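Renaming the loop variable to `_` documents that the rows are never read; the commented-out `del self.blocks[str(row[0])]` was the last reader of `row`. A standalone sketch against an in-memory SQLite database (the table layout here is invented for the example, not the real metastore schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute('CREATE TABLE hashes (id INTEGER PRIMARY KEY, hash TEXT)')
conn.execute('INSERT INTO hashes (hash) VALUES (?)', ("deadbeef",))

should_reorganize = False
# The underscore signals that the row contents are intentionally unused;
# the loop only needs to know whether at least one row came back.
for _ in conn.execute('SELECT hash FROM hashes'):
    should_reorganize = True
print(should_reorganize)  # True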
@@ -73,8 +73,12 @@ def get_compat_0_1():
 
 
 # API version to be used
-fuse_python_api = __getenv__('FUSE_PYTHON_API', '^[\d.]+$',
-                             lambda x: tuple([int(i) for i in x.split('.')]))
+fuse_python_api = __getenv__(
+    'FUSE_PYTHON_API',
+    '^[\d.]+$',
+    lambda x: tuple(int(i) for i in x.split('.')),
+)
+
 
 # deprecated way of API specification
 compat_0_1 = __getenv__('FUSE_PYTHON_COMPAT', '^(0.1|ALL)$', lambda x: True)
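Besides reflowing the call, the refactor drops the inner list: `tuple(int(i) for i in x.split('.'))` feeds the generator expression straight to `tuple()` instead of building a temporary list first. A minimal sketch, assuming a version string such as the one `FUSE_PYTHON_API` would carry:

version_string = "0.2"  # example value; in fuse.py it comes from the environment

# Generator expression: tuple() consumes the items as they are produced,
# with no intermediate list allocation.
fuse_python_api = tuple(int(i) for i in version_string.split('.'))
print(fuse_python_api)  # (0, 2)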
@@ -126,9 +130,7 @@ class FuseArgs(SubOptsHive):
     def mount_expected(self):
         if self.getmod('showhelp'):
             return False
-        if self.getmod('showversion'):
-            return False
-        return True
+        return not self.getmod('showversion')
 
     def assemble(self):
         """Mangle self into an argument array"""
@@ -141,9 +143,7 @@ class FuseArgs(SubOptsHive):
            if v:
                args.append(self.fuse_modifiers[m])
 
-        opta = []
-        for o, v in self.optdict.iteritems():
-            opta.append(o + '=' + v)
+        opta = [o + '=' + v for o, v in self.optdict.iteritems()]
        opta.extend(self.optlist)
 
        if opta:
@@ -167,7 +167,7 @@ class FuseArgs(SubOptsHive):
 class FuseFormatter(SubbedOptIndentedFormatter):
 
     def __init__(self, **kw):
-        if not 'indent_increment' in kw:
+        if 'indent_increment' not in kw:
             kw['indent_increment'] = 4
         SubbedOptIndentedFormatter.__init__(self, **kw)
 
@@ -338,7 +338,7 @@ class FuseOptParse(SubbedOptParse):
                    "having options or specifying the `subopt' attribute conflicts with `mountopt' attribute")
            opts = ('-o',)
            attrs['subopt'] = attrs.pop('mountopt')
-            if not 'dest' in attrs:
+            if 'dest' not in attrs:
                attrs['dest'] = attrs['subopt']
 
        SubbedOptParse.add_option(self, *opts, **attrs)
@@ -559,8 +559,7 @@ def feature_needs(*feas):
                maxva[0] = max(maxva[0], fp)
                continue
            if isinstance(fp, list) or isinstance(fp, tuple):
-                for f in fp:
-                    yield f
+                yield from fp
                continue
            ma = isinstance(fp, str) and re.compile("(!\s*|)re:(.*)").match(fp)
            if isinstance(fp, type(re.compile(''))) or ma:
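`yield from fp` (PEP 380, Python 3.3+) delegates to the sub-iterable and replaces the explicit `for f in fp: yield f` loop. A small self-contained sketch with a hypothetical helper, not taken from fuse.py:

def flatten_one_level(items):
    # Yield scalars as-is, and delegate to lists/tuples with `yield from`.
    for item in items:
        if isinstance(item, (list, tuple)):
            yield from item
        else:
            yield item

print(list(flatten_one_level([1, (2, 3), 4])))  # [1, 2, 3, 4]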
@@ -574,7 +573,7 @@ def feature_needs(*feas):
                    yield f
                continue
            ma = re.compile("has_(.*)").match(fp)
-            if ma and ma.groups()[0] in Fuse._attrs and not fp in fmap:
+            if ma and ma.groups()[0] in Fuse._attrs and fp not in fmap:
                yield 21
                continue
            yield fmap[fp]
@@ -728,8 +727,10 @@ class Fuse(object):
         if get_compat_0_1():
             args = self.main_0_1_preamble()
 
-        d = {'multithreaded': self.multithreaded and 1 or 0}
-        d['fuse_args'] = args or self.fuse_args.assemble()
+        d = {
+            'multithreaded': self.multithreaded and 1 or 0,
+            'fuse_args': args or self.fuse_args.assemble(),
+        }
 
         for t in 'file_class', 'dir_class':
             if hasattr(self, t):
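Building the dictionary as a single literal keeps both settings visible in one place instead of creating the dict and mutating it on the next line. A standalone sketch with placeholder values (none of the names below come from the commit):

multithreaded = True      # placeholder for self.multithreaded
assembled_args = ["-f"]   # placeholder for self.fuse_args.assemble()
args = None               # placeholder for the compat-0.1 preamble result

# One literal instead of a dict() followed by item assignment.
d = {
    'multithreaded': multithreaded and 1 or 0,
    'fuse_args': args or assembled_args,
}
print(d)  # {'multithreaded': 1, 'fuse_args': ['-f']}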
@@ -53,8 +53,8 @@ def download_block(hash_uid, filename):
 
     entity = client.get_entity(client.get_me())
     messages = client.get_messages(entity, limit=1, search=hash_uid)
-    for i in range(len(messages)):
-        msg = messages[i]
+    for message in messages:
+        msg = message
         if msg.message == hash_uid:
             # FIFO = f"dpipe_{hash_uid}"
             # import errno
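Iterating over `messages` directly replaces the `range(len(messages))` indexing; the index `i` was only ever used to fetch `messages[i]`. (A follow-up cleanup could drop the `msg = message` alias as well.) A minimal sketch with hypothetical stand-in message objects rather than real Telethon ones:

from collections import namedtuple

Message = namedtuple("Message", "message")  # stand-in for a Telethon message
messages = [Message("abc123"), Message("other")]
hash_uid = "abc123"

# Iterate over the sequence itself; no index bookkeeping needed.
for message in messages:
    if message.message == hash_uid:
        print("found block", hash_uid)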
@@ -187,11 +187,7 @@ class TelegramClientX(TelegramClient):
         # Set a default file name if None was specified
         file_id = helpers.generate_random_long()
         if not file_name:
-            if isinstance(file, str):
-                file_name = os.path.basename(file)
-            else:
-                file_name = str(file_id)
-
+            file_name = os.path.basename(file) if isinstance(file, str) else str(file_id)
         # Determine whether the file is too big (over 10MB) or not
         # Telegram does make a distinction between smaller or larger files
         is_large = file_size > 10 * 1024 * 1024
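The four-line if/else collapses into a conditional expression, keeping the default-file-name decision on one line under the `if not file_name:` guard. A standalone sketch with example inputs (the `default_file_name` helper is invented for illustration, not part of TelegramClientX):

import os

def default_file_name(file, file_id, file_name=None):
    # Conditional expression: basename for path strings, the random id otherwise.
    if not file_name:
        file_name = os.path.basename(file) if isinstance(file, str) else str(file_id)
    return file_name

print(default_file_name("/tmp/photo.jpg", 42))  # photo.jpg
print(default_file_name(b"\x89PNG", 42))        # 42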