Run latest pyupgrade

This commit is contained in:
Adrian Moennich 2021-04-14 12:36:41 +02:00
parent 3c37b3a9de
commit 84381e8eea
8 changed files with 15 additions and 15 deletions

View File

@@ -87,7 +87,7 @@ def _created(obj, **kwargs):
elif isinstance(obj, SubContribution):
parent = obj.contribution
else:
raise TypeError('Unexpected object: {}'.format(type(obj).__name__))
raise TypeError(f'Unexpected object: {type(obj).__name__}')
if parent:
_register_change(parent, ChangeType.data_changed)
_register_change(obj, ChangeType.created)

View File

@@ -58,7 +58,7 @@ class MARCXMLGenerator:
xg = XMLGen(init=False)
xg.openTag('record')
xg.openTag('datafield', [['tag', '970'], ['ind1', ' '], ['ind2', ' ']])
xg.writeTag('subfield', 'INDICO.{}'.format(compound_id(obj)), [['code', 'a']])
xg.writeTag('subfield', f'INDICO.{compound_id(obj)}', [['code', 'a']])
xg.closeTag('datafield')
xg.openTag('datafield', [['tag', '980'], ['ind1', ' '], ['ind2', ' ']])
xg.writeTag('subfield', 'DELETED', [['code', 'c']])

View File

@@ -53,12 +53,12 @@ class PiwikRequest:
for key, value in query_params.items():
if isinstance(value, list):
value = ','.join(value)
query += '{}={}&'.format(str(key), str(value))
query += f'{key}={value}&'
return query[:-1]
def get_query_url(self, **query_params):
"""Return the url for a Piwik API query"""
return '{}?{}'.format(self.api_url, self.get_query(query_params))
return f'{self.api_url}?{self.get_query(query_params)}'
def _perform_call(self, query_url, default_response=None, timeout=10):
"""Returns the raw results from the API"""

View File

@@ -35,7 +35,7 @@ class PiwikQueryReportEventGraphBase(PiwikQueryReportEventBase):
if png.startswith(b'GD extension must be loaded'):
current_plugin.logger.warning('Piwik server answered on ImageGraph.get: %s', png)
return
return 'data:image/png;base64,{}'.format(b64encode(png).decode())
return f'data:image/png;base64,{b64encode(png).decode()}'
class PiwikQueryReportEventGraphCountries(PiwikQueryReportEventGraphBase):

View File

@@ -120,7 +120,7 @@ class ReportGeneral(ReportBase):
cid = (contribution.legacy_mapping.legacy_contribution_id if contribution.legacy_mapping
else contribution.id)
key = f'{contribution.event_id}t{cid}'
self.contributions[key] = '{} ({})'.format(contribution.title, format_time(contribution.start_dt))
self.contributions[key] = f'{contribution.title} ({format_time(contribution.start_dt)})'
class ReportVisitsPerDay(ReportBase):

View File

@@ -187,11 +187,11 @@ class S3Importer:
match = _new_path_re.match(obj.storage_file_id)
if match:
# already in the current format
assert obj.storage_file_id == new_storage_path.replace('-0-', '-{}-'.format(match.group(2)))
assert obj.storage_file_id == new_storage_path.replace('-0-', f'-{match.group(2)}-')
return obj.storage_file_id, new_filename
else:
match = _new_path_re.match(new_storage_path)
return '{}{}{}'.format(match.group(1), crc32(obj.storage_file_id), match.group(3)), new_filename
return f'{match.group(1)}{crc32(obj.storage_file_id)}{match.group(3)}', new_filename
def queue_for_rclone(self, obj, bucket, key):
# XXX: we assume the file is local so the context manager doesn't create a
@@ -447,7 +447,7 @@ def copy(source_backend_names, bucket_names, static_bucket_name, s3_endpoint, s3
raise click.UsageError('All but the last bucket name need to contain criteria')
matches = [(re.match(r'^(<|>|==|<=|>=)\s*(\d{4}):(.+)$', name), backend) for name, backend in bucket_names[:-1]]
if not all(x[0] for x in matches):
raise click.UsageError("Could not parse '{}'".format(bucket_names[matches.index(None)]))
raise click.UsageError(f"Could not parse '{bucket_names[matches.index(None)]}'")
criteria = [(match.groups(), backend) for match, backend in matches]
# Build and compile a function to get the bucket/backend name to avoid
# processing the criteria for every single file (can be millions for large
@@ -456,11 +456,11 @@ def copy(source_backend_names, bucket_names, static_bucket_name, s3_endpoint, s3
if criteria:
for i, ((op, value, bucket), backend) in enumerate(criteria):
code.append(' {}if dt.year {} {}:'.format('el' if i else '', op, value))
code.append(' bucket, backend = {!r}'.format((bucket, backend)))
code.append(f' bucket, backend = {(bucket, backend)!r}')
code.append(' else:')
code.append(' bucket, backend = {!r}'.format(bucket_names[-1]))
code.append(f' bucket, backend = {bucket_names[-1]!r}')
else:
code.append(' bucket, backend = {!r}'.format(bucket_names[-1]))
code.append(f' bucket, backend = {bucket_names[-1]!r}')
code.append(' bucket = bucket.replace("<year>", dt.strftime("%Y"))')
code.append(' bucket = bucket.replace("<month>", dt.strftime("%m"))')
code.append(' bucket = bucket.replace("<week>", dt.strftime("%W"))')

View File

@@ -193,7 +193,7 @@ class S3Storage(S3StorageBase):
raise StorageError('Bucket name cannot be longer than 63 chars')
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self.bucket_name)
return f'<{type(self).__name__}: {self.bucket_name}>'
def _get_current_bucket_name(self):
return self.bucket_name
@@ -225,7 +225,7 @@ class DynamicS3Storage(S3StorageBase):
self._check_bucket_secret()
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self.bucket_name_template)
return f'<{type(self).__name__}: {self.bucket_name_template}>'
def _check_bucket_secret(self):
if not self.bucket_secret:

View File

@@ -53,7 +53,7 @@ def _get_config():
def _update_meta(data):
path = Path('_meta/setup.cfg')
content = path.read_text()
new_content = re.sub(r'(?<={}\n).*(?=\n{})'.format(re.escape(START_MARKER), re.escape(END_MARKER)), data, content,
new_content = re.sub(fr'(?<={re.escape(START_MARKER)}\n).*(?=\n{re.escape(END_MARKER)})', data, content,
flags=re.DOTALL)
if content == new_content:
return False