Mirror of https://gitlab.com/fdroid/fdroidserver.git (synced 2024-11-18 20:50:10 +01:00)
lint.py: use pathlib and support Windows
Commit 6bafb036ee (parent 8b17fbf703)
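For orientation before the diff: a minimal sketch (not part of the commit; the appid is made up) of the os.path-to-pathlib equivalences the changes below lean on.

import os
from pathlib import Path

appid = 'org.example.app'  # hypothetical appid, for illustration only

old_style = os.path.join('metadata', appid + '.yml')  # str built with os.path
new_style = Path('metadata') / (appid + '.yml')       # pathlib.Path object

# The two spellings answer the same questions:
assert os.path.isfile(old_style) == new_style.is_file()
assert os.path.isdir('metadata') == Path('metadata').is_dir()
assert os.path.splitext(os.path.basename(old_style))[0] == new_style.stem
assert old_style.endswith('.yml') == (new_style.suffix == '.yml')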
@@ -3983,9 +3983,7 @@ YAML_LINT_CONFIG = {'extends': 'default',
 
 
 def run_yamllint(path, indent=0):
-    # TODO: Remove this
-    path = str(path)
+    path = Path(path)
     try:
         import yamllint.config
         import yamllint.linter
@@ -3993,10 +3991,10 @@ def run_yamllint(path, indent=0):
         return ''
 
     result = []
-    with open(path, 'r', encoding='utf-8') as f:
+    with path.open('r', encoding='utf-8') as f:
         problems = yamllint.linter.run(f, yamllint.config.YamlLintConfig(json.dumps(YAML_LINT_CONFIG)))
     for problem in problems:
-        result.append(' ' * indent + path + ':' + str(problem.line) + ': ' + problem.message)
+        result.append(' ' * indent + str(path) + ':' + str(problem.line) + ': ' + problem.message)
     return '\n'.join(result)
 
 
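The two hunks above touch the shared run_yamllint() helper (apparently in fdroidserver.common, given the common.run_yamllint() calls later in this diff); the hunks that follow, where the line numbers restart at 17, are lint.py itself. A hedged usage sketch of the helper after this change (the metadata path is made up): since it now normalizes its argument with Path(path), callers may pass either a str or a pathlib.Path.

from pathlib import Path

from fdroidserver import common

# Both calls should behave the same after this commit, because
# run_yamllint() converts its argument with Path(path) up front.
report = common.run_yamllint('metadata/org.example.app.yml', indent=2)
report = common.run_yamllint(Path('metadata') / 'org.example.app.yml', indent=2)
if report:
    print(report)  # one '<path>:<line>: <message>' entry per yamllint problem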
@@ -17,11 +17,11 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 from argparse import ArgumentParser
-import glob
-import os
 import re
 import sys
+import platform
 import urllib.parse
+from pathlib import Path
 
 from . import _
 from . import common
@@ -33,8 +33,12 @@ options = None
 
 
 def enforce_https(domain):
-    return (re.compile(r'^http://([^/]*\.)?' + re.escape(domain) + r'(/.*)?', re.IGNORECASE),
-            domain + " URLs should always use https://")
+    return (
+        re.compile(
+            r'^http://([^/]*\.)?' + re.escape(domain) + r'(/.*)?', re.IGNORECASE
+        ),
+        domain + " URLs should always use https://",
+    )
 
 
 https_enforcings = [
@@ -59,8 +63,10 @@ https_enforcings = [
 
 
 def forbid_shortener(domain):
-    return (re.compile(r'https?://[^/]*' + re.escape(domain) + r'/.*'),
-            _("URL shorteners should not be used"))
+    return (
+        re.compile(r'https?://[^/]*' + re.escape(domain) + r'/.*'),
+        _("URL shorteners should not be used"),
+    )
 
 
 http_url_shorteners = [
@@ -119,70 +125,98 @@ http_url_shorteners = [
     forbid_shortener('➡.ws'),
 ]
 
-http_checks = https_enforcings + http_url_shorteners + [
-    (re.compile(r'^(?!https?://)[^/]+'),
-     _("URL must start with https:// or http://")),
-    (re.compile(r'^https://(github|gitlab)\.com(/[^/]+){2,3}\.git'),
-     _("Appending .git is not necessary")),
-    (re.compile(r'^https://[^/]*(github|gitlab|bitbucket|rawgit|githubusercontent)\.[a-zA-Z]+/([^/]+/){2,3}master/'),
-     _("Use /HEAD instead of /master to point at a file in the default branch")),
-]
+http_checks = (
+    https_enforcings
+    + http_url_shorteners
+    + [
+        (
+            re.compile(r'^(?!https?://)[^/]+'),
+            _("URL must start with https:// or http://"),
+        ),
+        (
+            re.compile(r'^https://(github|gitlab)\.com(/[^/]+){2,3}\.git'),
+            _("Appending .git is not necessary"),
+        ),
+        (
+            re.compile(
+                r'^https://[^/]*(github|gitlab|bitbucket|rawgit|githubusercontent)\.[a-zA-Z]+/([^/]+/){2,3}master/'
+            ),
+            _("Use /HEAD instead of /master to point at a file in the default branch"),
+        ),
+    ]
+)
 
 regex_checks = {
     'WebSite': http_checks,
     'SourceCode': http_checks,
     'Repo': https_enforcings,
     'UpdateCheckMode': https_enforcings,
-    'IssueTracker': http_checks + [
-        (re.compile(r'.*github\.com/[^/]+/[^/]+/*$'),
-         _("/issues is missing")),
-        (re.compile(r'.*gitlab\.com/[^/]+/[^/]+/*$'),
-         _("/issues is missing")),
+    'IssueTracker': http_checks
+    + [
+        (re.compile(r'.*github\.com/[^/]+/[^/]+/*$'), _("/issues is missing")),
+        (re.compile(r'.*gitlab\.com/[^/]+/[^/]+/*$'), _("/issues is missing")),
     ],
-    'Donate': http_checks + [
-        (re.compile(r'.*flattr\.com'),
-         _("Flattr donation methods belong in the FlattrID: field")),
-        (re.compile(r'.*liberapay\.com'),
-         _("Liberapay donation methods belong in the Liberapay: field")),
-        (re.compile(r'.*opencollective\.com'),
-         _("OpenCollective donation methods belong in the OpenCollective: field")),
+    'Donate': http_checks
+    + [
+        (
+            re.compile(r'.*flattr\.com'),
+            _("Flattr donation methods belong in the FlattrID: field"),
+        ),
+        (
+            re.compile(r'.*liberapay\.com'),
+            _("Liberapay donation methods belong in the Liberapay: field"),
+        ),
+        (
+            re.compile(r'.*opencollective\.com'),
+            _("OpenCollective donation methods belong in the OpenCollective: field"),
+        ),
     ],
     'Changelog': http_checks,
     'Author Name': [
-        (re.compile(r'^\s'),
-         _("Unnecessary leading space")),
-        (re.compile(r'.*\s$'),
-         _("Unnecessary trailing space")),
+        (re.compile(r'^\s'), _("Unnecessary leading space")),
+        (re.compile(r'.*\s$'), _("Unnecessary trailing space")),
     ],
     'Summary': [
-        (re.compile(r'.*\b(free software|open source)\b.*', re.IGNORECASE),
-         _("No need to specify that the app is Free Software")),
-        (re.compile(r'.*((your|for).*android|android.*(app|device|client|port|version))', re.IGNORECASE),
-         _("No need to specify that the app is for Android")),
-        (re.compile(r'.*[a-z0-9][.!?]( |$)'),
-         _("Punctuation should be avoided")),
-        (re.compile(r'^\s'),
-         _("Unnecessary leading space")),
-        (re.compile(r'.*\s$'),
-         _("Unnecessary trailing space")),
+        (
+            re.compile(r'.*\b(free software|open source)\b.*', re.IGNORECASE),
+            _("No need to specify that the app is Free Software"),
+        ),
+        (
+            re.compile(
+                r'.*((your|for).*android|android.*(app|device|client|port|version))',
+                re.IGNORECASE,
+            ),
+            _("No need to specify that the app is for Android"),
+        ),
+        (re.compile(r'.*[a-z0-9][.!?]( |$)'), _("Punctuation should be avoided")),
+        (re.compile(r'^\s'), _("Unnecessary leading space")),
+        (re.compile(r'.*\s$'), _("Unnecessary trailing space")),
    ],
-    'Description': https_enforcings + http_url_shorteners + [
-        (re.compile(r'\s*[*#][^ .]'),
-         _("Invalid bulleted list")),
-        (re.compile(r'https://f-droid.org/[a-z][a-z](_[A-Za-z]{2,4})?/'),
-         _("Locale included in f-droid.org URL")),
-        (re.compile(r'^\s'),
-         _("Unnecessary leading space")),
-        (re.compile(r'.*\s$'),
-         _("Unnecessary trailing space")),
-        (re.compile(r'.*<(applet|base|body|button|embed|form|head|html|iframe|img|input|link|object|picture|script|source|style|svg|video).*', re.IGNORECASE),
-         _("Forbidden HTML tags")),
-        (re.compile(r'''.*\s+src=["']javascript:.*'''),
-         _("Javascript in HTML src attributes")),
+    'Description': https_enforcings
+    + http_url_shorteners
+    + [
+        (re.compile(r'\s*[*#][^ .]'), _("Invalid bulleted list")),
+        (
+            re.compile(r'https://f-droid.org/[a-z][a-z](_[A-Za-z]{2,4})?/'),
+            _("Locale included in f-droid.org URL"),
+        ),
+        (re.compile(r'^\s'), _("Unnecessary leading space")),
+        (re.compile(r'.*\s$'), _("Unnecessary trailing space")),
+        (
+            re.compile(
+                r'.*<(applet|base|body|button|embed|form|head|html|iframe|img|input|link|object|picture|script|source|style|svg|video).*',
+                re.IGNORECASE,
+            ),
+            _("Forbidden HTML tags"),
+        ),
+        (
+            re.compile(r'''.*\s+src=["']javascript:.*'''),
+            _("Javascript in HTML src attributes"),
+        ),
     ],
 }
 
-locale_pattern = re.compile(r'^[a-z]{2,3}(-[A-Z][A-Z])?$')
+locale_pattern = re.compile(r"[a-z]{2,3}(-([A-Z][a-zA-Z]+|\d+|[a-z]+))*")
 
 
 def check_regexes(app):
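An aside on the widened locale_pattern at the end of the hunk above (a sketch, not from the commit): combined with the switch from match() to fullmatch() in check_files_dir() further down, it now accepts locale directory names with script, numeric, or lower-case subtags instead of only two-letter regions.

import re

locale_pattern = re.compile(r"[a-z]{2,3}(-([A-Z][a-zA-Z]+|\d+|[a-z]+))*")

# Example names are illustrative, not taken from any real repo.
assert locale_pattern.fullmatch('en')
assert locale_pattern.fullmatch('pt-BR')    # region subtag
assert locale_pattern.fullmatch('zh-Hans')  # script subtag
assert locale_pattern.fullmatch('es-419')   # numeric region subtag
assert not locale_pattern.fullmatch('signatures')  # hence the separate name == 'signatures' check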
@@ -215,8 +249,7 @@ def get_lastbuild(builds):
 
 
 def check_update_check_data_url(app):
-    """UpdateCheckData must have a valid HTTPS URL to protect checkupdates runs
-    """
+    """UpdateCheckData must have a valid HTTPS URL to protect checkupdates runs"""
     if app.UpdateCheckData and app.UpdateCheckMode == 'HTTP':
         urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|')
         for url in (urlcode, urlver):
@@ -229,33 +262,40 @@ def check_update_check_data_url(app):
 
 
 def check_vercode_operation(app):
-    if app.VercodeOperation and not common.VERCODE_OPERATION_RE.match(app.VercodeOperation):
+    if app.VercodeOperation and not common.VERCODE_OPERATION_RE.match(
+        app.VercodeOperation
+    ):
         yield _('Invalid VercodeOperation: {field}').format(field=app.VercodeOperation)
 
 
 def check_ucm_tags(app):
     lastbuild = get_lastbuild(app.get('Builds', []))
-    if (lastbuild is not None
+    if (
+        lastbuild is not None
         and lastbuild.commit
         and app.UpdateCheckMode == 'RepoManifest'
         and not lastbuild.commit.startswith('unknown')
         and lastbuild.versionCode == app.CurrentVersionCode
         and not lastbuild.forcevercode
-        and any(s in lastbuild.commit for s in '.,_-/')):
-        yield _("Last used commit '{commit}' looks like a tag, but UpdateCheckMode is '{ucm}'")\
-            .format(commit=lastbuild.commit, ucm=app.UpdateCheckMode)
+        and any(s in lastbuild.commit for s in '.,_-/')
+    ):
+        yield _(
+            "Last used commit '{commit}' looks like a tag, but UpdateCheckMode is '{ucm}'"
+        ).format(commit=lastbuild.commit, ucm=app.UpdateCheckMode)
 
 
 def check_char_limits(app):
     limits = config['char_limits']
 
     if len(app.Summary) > limits['summary']:
-        yield _("Summary of length {length} is over the {limit} char limit")\
-            .format(length=len(app.Summary), limit=limits['summary'])
+        yield _("Summary of length {length} is over the {limit} char limit").format(
+            length=len(app.Summary), limit=limits['summary']
+        )
 
     if len(app.Description) > limits['description']:
-        yield _("Description of length {length} is over the {limit} char limit")\
-            .format(length=len(app.Description), limit=limits['description'])
+        yield _("Description of length {length} is over the {limit} char limit").format(
+            length=len(app.Description), limit=limits['description']
+        )
 
 
 def check_old_links(app):
@@ -272,8 +312,9 @@ def check_old_links(app):
         for f in ['WebSite', 'SourceCode', 'IssueTracker', 'Changelog']:
             v = app.get(f)
             if any(s in v for s in old_sites):
-                yield _("App is in '{repo}' but has a link to {url}")\
-                    .format(repo=app.Repo, url=v)
+                yield _("App is in '{repo}' but has a link to {url}").format(
+                    repo=app.Repo, url=v
+                )
 
 
 def check_useless_fields(app):
@@ -286,8 +327,14 @@ filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)')
 
 def check_checkupdates_ran(app):
     if filling_ucms.match(app.UpdateCheckMode):
-        if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == '0':
-            yield _("UpdateCheckMode is set but it looks like checkupdates hasn't been run yet")
+        if (
+            not app.AutoName
+            and not app.CurrentVersion
+            and app.CurrentVersionCode == '0'
+        ):
+            yield _(
+                "UpdateCheckMode is set but it looks like checkupdates hasn't been run yet"
+            )
 
 
 def check_empty_fields(app):
@@ -295,7 +342,8 @@ def check_empty_fields(app):
         yield _("Categories are not set")
 
 
-all_categories = set([
+all_categories = set(
+    [
     "Connectivity",
     "Development",
     "Games",
@@ -313,7 +361,8 @@ all_categories = set([
     "Theming",
     "Time",
     "Writing",
-])
+    ]
+)
 
 
 def check_categories(app):
@@ -376,7 +425,9 @@ def check_bulleted_lists(app):
         if line[0] == lchar and line[1] == ' ':
             lcount += 1
             if lcount > 2 and lchar not in validchars:
-                yield _("Description has a list (%s) but it isn't bulleted (*) nor numbered (#)") % lchar
+                yield _(
+                    "Description has a list (%s) but it isn't bulleted (*) nor numbered (#)"
+                ) % lchar
                 break
         else:
             lchar = line[0]
@@ -389,50 +440,61 @@ def check_builds(app):
     for build in app.get('Builds', []):
         if build.disable:
             if build.disable.startswith('Generated by import.py'):
-                yield _("Build generated by `fdroid import` - remove disable line once ready")
+                yield _(
+                    "Build generated by `fdroid import` - remove disable line once ready"
+                )
             continue
         for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
             if build.commit and build.commit.startswith(s):
-                yield _("Branch '{branch}' used as commit in build '{versionName}'")\
-                    .format(branch=s, versionName=build.versionName)
+                yield _(
+                    "Branch '{branch}' used as commit in build '{versionName}'"
+                ).format(branch=s, versionName=build.versionName)
             for srclib in build.srclibs:
                 if '@' in srclib:
                     ref = srclib.split('@')[1].split('/')[0]
                     if ref.startswith(s):
-                        yield _("Branch '{branch}' used as commit in srclib '{srclib}'")\
-                            .format(branch=s, srclib=srclib)
+                        yield _(
+                            "Branch '{branch}' used as commit in srclib '{srclib}'"
+                        ).format(branch=s, srclib=srclib)
                 else:
-                    yield _('srclibs missing name and/or @') + ' (srclibs: ' + srclib + ')'
+                    yield _(
+                        'srclibs missing name and/or @'
+                    ) + ' (srclibs: ' + srclib + ')'
         for key in build.keys():
            if key not in supported_flags:
                yield _('%s is not an accepted build field') % key
 
 
 def check_files_dir(app):
-    dir_path = os.path.join('metadata', app.id)
-    if not os.path.isdir(dir_path):
+    dir_path = Path('metadata') / app.id
+    if not dir_path.is_dir():
         return
     files = set()
-    for name in os.listdir(dir_path):
-        path = os.path.join(dir_path, name)
-        if not (os.path.isfile(path) or name == 'signatures' or locale_pattern.match(name)):
+    for path in dir_path.iterdir():
+        name = path.name
+        if not (
+            path.is_file() or name == 'signatures' or locale_pattern.fullmatch(name)
+        ):
            yield _("Found non-file at %s") % path
            continue
        files.add(name)
 
-    used = {'signatures', }
+    used = {
+        'signatures',
+    }
     for build in app.get('Builds', []):
         for fname in build.patch:
             if fname not in files:
-                yield _("Unknown file '{filename}' in build '{versionName}'")\
-                    .format(filename=fname, versionName=build.versionName)
+                yield _("Unknown file '{filename}' in build '{versionName}'").format(
+                    filename=fname, versionName=build.versionName
+                )
             else:
                 used.add(fname)
 
     for name in files.difference(used):
-        if locale_pattern.match(name):
+        if locale_pattern.fullmatch(name):
             continue
-        yield _("Unused file at %s") % os.path.join(dir_path, name)
+        yield _("Unused file at %s") % (dir_path / name)
 
 
 def check_format(app):
@@ -446,41 +508,49 @@ def check_license_tag(app):
         return
     if app.License not in config['lint_licenses']:
         if config['lint_licenses'] == APPROVED_LICENSES:
-            yield _('Unexpected license tag "{}"! Only use FSF or OSI '
-                    'approved tags from https://spdx.org/license-list') \
-                .format(app.License)
+            yield _(
+                'Unexpected license tag "{}"! Only use FSF or OSI '
+                'approved tags from https://spdx.org/license-list'
+            ).format(app.License)
         else:
-            yield _('Unexpected license tag "{}"! Only use license tags '
-                    'configured in your config file').format(app.License)
+            yield _(
+                'Unexpected license tag "{}"! Only use license tags '
+                'configured in your config file'
+            ).format(app.License)
 
 
 def check_extlib_dir(apps):
-    dir_path = os.path.join('build', 'extlib')
-    unused_extlib_files = set()
-    for root, dirs, files in os.walk(dir_path):
-        for name in files:
-            unused_extlib_files.add(os.path.join(root, name)[len(dir_path) + 1:])
+    dir_path = Path('build/extlib')
+    extlib_files = set()
+    for path in dir_path.glob('**/*'):
+        if path.is_file():
+            extlib_files.add(path.relative_to(dir_path))
 
     used = set()
     for app in apps:
         for build in app.get('Builds', []):
             for path in build.extlibs:
-                if path not in unused_extlib_files:
-                    yield _("{appid}: Unknown extlib {path} in build '{versionName}'")\
-                        .format(appid=app.id, path=path, versionName=build.versionName)
+                if path not in extlib_files:
+                    yield _(
+                        "{appid}: Unknown extlib {path} in build '{versionName}'"
+                    ).format(appid=app.id, path=path, versionName=build.versionName)
                 else:
                     used.add(path)
 
-    for path in unused_extlib_files.difference(used):
-        if any(path.endswith(s) for s in [
-                '.gitignore',
-                'source.txt', 'origin.txt', 'md5.txt',
-                'LICENSE', 'LICENSE.txt',
-                'COPYING', 'COPYING.txt',
-                'NOTICE', 'NOTICE.txt',
-                ]):
-            continue
-        yield _("Unused extlib at %s") % os.path.join(dir_path, path)
+    for path in extlib_files.difference(used):
+        if path.name not in [
+            '.gitignore',
+            'source.txt',
+            'origin.txt',
+            'md5.txt',
+            'LICENSE',
+            'LICENSE.txt',
+            'COPYING',
+            'COPYING.txt',
+            'NOTICE',
+            'NOTICE.txt',
+        ]:
+            yield _("Unused extlib at %s") % (dir_path / path)
 
 
 def check_app_field_types(app):
@@ -493,39 +563,69 @@ def check_app_field_types(app):
             continue
         elif field == 'Builds':
             if not isinstance(v, list):
-                yield(_("{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!")
-                      .format(appid=app.id, field=field,
-                              type='list', fieldtype=v.__class__.__name__))
+                yield (
+                    _(
+                        "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!"
+                    ).format(
+                        appid=app.id,
+                        field=field,
+                        type='list',
+                        fieldtype=v.__class__.__name__,
+                    )
+                )
         elif t == metadata.TYPE_LIST and not isinstance(v, list):
-            yield(_("{appid}: {field} must be a '{type}', but it is a '{fieldtype}!'")
-                  .format(appid=app.id, field=field,
-                          type='list', fieldtype=v.__class__.__name__))
+            yield (
+                _(
+                    "{appid}: {field} must be a '{type}', but it is a '{fieldtype}!'"
+                ).format(
+                    appid=app.id,
+                    field=field,
+                    type='list',
+                    fieldtype=v.__class__.__name__,
+                )
+            )
         elif t == metadata.TYPE_STRING and not type(v) in (str, bool, dict):
-            yield(_("{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!")
-                  .format(appid=app.id, field=field,
-                          type='str', fieldtype=v.__class__.__name__))
+            yield (
+                _(
+                    "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!"
+                ).format(
+                    appid=app.id,
+                    field=field,
+                    type='str',
+                    fieldtype=v.__class__.__name__,
+                )
+            )
 
 
 def check_for_unsupported_metadata_files(basedir=""):
     """Checks whether any non-metadata files are in metadata/"""
+    basedir = Path(basedir)
     global config
 
+    if not (basedir / 'metadata').exists():
+        return False
     return_value = False
-    for f in glob.glob(basedir + 'metadata/*') + glob.glob(basedir + 'metadata/.*'):
-        if os.path.isdir(f):
-            if not os.path.exists(f + '.yml'):
+    for f in (basedir / 'metadata').iterdir():
+        if f.is_dir():
+            if not Path(str(f) + '.yml').exists():
                 print(_('"%s/" has no matching metadata file!') % f)
                 return_value = True
-        elif f.endswith('.yml'):
-            packageName = os.path.splitext(os.path.basename(f))[0]
+        elif f.suffix == '.yml':
+            packageName = f.stem
             if not common.is_valid_package_name(packageName):
-                print('"' + packageName + '" is an invalid package name!\n'
-                      + 'https://developer.android.com/studio/build/application-id')
+                print(
+                    '"'
+                    + packageName
+                    + '" is an invalid package name!\n'
+                    + 'https://developer.android.com/studio/build/application-id'
+                )
                 return_value = True
         else:
-            print(_('"{path}" is not a supported file format (use: metadata/*.yml)')
-                  .format(path=f.replace(basedir, '')))
+            print(
+                _(
+                    '"{path}" is not a supported file format (use: metadata/*.yml)'
+                ).format(path=f.relative_to(basedir))
+            )
            return_value = True
 
    return return_value
@@ -556,8 +656,11 @@ def check_current_version_code(app):
     if active_builds == 0:
         return  # all builds are disabled
     if cv is not None and int(cv) < min_versionCode:
-        yield(_('CurrentVersionCode {cv} is less than oldest build entry {versionCode}')
-              .format(cv=cv, versionCode=min_versionCode))
+        yield (
+            _(
+                'CurrentVersionCode {cv} is less than oldest build entry {versionCode}'
+            ).format(cv=cv, versionCode=min_versionCode)
+        )
 
 
 def main():
@@ -567,12 +670,25 @@ def main():
     # Parse command line...
     parser = ArgumentParser()
     common.setup_global_opts(parser)
-    parser.add_argument("-f", "--format", action="store_true", default=False,
-                        help=_("Also warn about formatting issues, like rewritemeta -l"))
-    parser.add_argument('--force-yamllint', action="store_true", default=False,
-                        help=_("When linting the entire repository yamllint is disabled by default. "
-                               "This option forces yamllint regardless."))
-    parser.add_argument("appid", nargs='*', help=_("application ID of file to operate on"))
+    parser.add_argument(
+        "-f",
+        "--format",
+        action="store_true",
+        default=False,
+        help=_("Also warn about formatting issues, like rewritemeta -l"),
+    )
+    parser.add_argument(
+        '--force-yamllint',
+        action="store_true",
+        default=False,
+        help=_(
+            "When linting the entire repository yamllint is disabled by default. "
+            "This option forces yamllint regardless."
+        ),
+    )
+    parser.add_argument(
+        "appid", nargs='*', help=_("application ID of file to operate on")
+    )
     metadata.add_metadata_arguments(parser)
     options = parser.parse_args()
     metadata.warnings_action = options.W
@@ -586,7 +702,7 @@ def main():
     anywarns = check_for_unsupported_metadata_files()
 
     apps_check_funcs = []
-    if len(options.appid) == 0:
+    if not options.appid:
         # otherwise it finds tons of unused extlibs
         apps_check_funcs.append(check_extlib_dir)
     for check_func in apps_check_funcs:
@@ -600,29 +716,37 @@ def main():
 
         if options.force_yamllint:
             import yamllint  # throw error if it is not installed
 
             yamllint  # make pyflakes ignore this
 
        # only run yamllint when linting individual apps.
-        if len(options.appid) > 0 or options.force_yamllint:
+        if options.appid or options.force_yamllint:
 
            # run yamllint on app metadata
-            ymlpath = os.path.join('metadata', appid + '.yml')
-            if os.path.isfile(ymlpath):
+            ymlpath = Path('metadata') / (appid + '.yml')
+            if ymlpath.is_file():
                yamllintresult = common.run_yamllint(ymlpath)
-                if yamllintresult != '':
+                if yamllintresult:
                    print(yamllintresult)
 
            # run yamllint on srclib metadata
            srclibs = set()
            for build in app.get('Builds', []):
                for srclib in build.srclibs:
-                    srclibs.add(srclib)
+                    name, _ref, _number, _subdir = common.parse_srclib_spec(srclib)
+                    srclibs.add(name + '.yml')
            for srclib in srclibs:
-                name, ref, number, subdir = common.parse_srclib_spec(srclib)
-                srclibpath = os.path.join('srclibs', name + '.yml')
-                if os.path.isfile(srclibpath):
+                srclibpath = Path('srclibs') / srclib
+                if srclibpath.is_file():
+                    if platform.system() == 'Windows':
+                        # Handle symlink on Windows
+                        symlink = srclibpath.read_text()
+                        if symlink in srclibs:
+                            continue
+                        elif (srclibpath.parent / symlink).is_file():
+                            srclibpath = srclibpath.parent / symlink
                    yamllintresult = common.run_yamllint(srclibpath)
-                    if yamllintresult != '':
+                    if yamllintresult:
                        print(yamllintresult)
 
        app_check_funcs = [
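One behavioral note on the Windows branch added above: on a Windows checkout where git cannot create real symlinks, a symlinked srclibs/*.yml is materialized as a plain text file whose content is the link target, so the code reads it with read_text() and resolves it by hand. A standalone sketch of that idea (the helper name and example file names are illustrative, not from the commit); the remaining hunks below are the lint test module.

import platform
from pathlib import Path


def resolve_windows_symlink(srclibpath, known_srclibs):
    """Follow a git 'symlink stored as a text file' manually; no-op elsewhere."""
    if platform.system() != 'Windows':
        return srclibpath
    target = srclibpath.read_text()           # e.g. a sibling name like 'Example.yml'
    if target in known_srclibs:
        return None                           # the target will be linted under its own name
    if (srclibpath.parent / target).is_file():
        return srclibpath.parent / target     # lint the file the link points at
    return srclibpath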
@@ -2,7 +2,6 @@
 
 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163
 
-import inspect
 import logging
 import optparse
 import os
@@ -10,13 +9,12 @@ import shutil
 import sys
 import tempfile
 import unittest
+from pathlib import Path
 
-localmodule = os.path.realpath(
-    os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..')
-)
-print('localmodule: ' + localmodule)
+localmodule = Path(__file__).resolve().parent.parent
+print('localmodule: ' + str(localmodule))
 if localmodule not in sys.path:
-    sys.path.insert(0, localmodule)
+    sys.path.insert(0, str(localmodule))
 
 import fdroidserver.common
 import fdroidserver.lint
@@ -27,26 +25,34 @@ class LintTest(unittest.TestCase):
     '''fdroidserver/lint.py'''
 
     def setUp(self):
-        logging.basicConfig(level=logging.INFO)
-        self.basedir = os.path.join(localmodule, 'tests')
-        self.tmpdir = os.path.abspath(os.path.join(self.basedir, '..', '.testfiles'))
-        if not os.path.exists(self.tmpdir):
-            os.makedirs(self.tmpdir)
-        os.chdir(self.basedir)
+        logging.basicConfig(level=logging.DEBUG)
+        self.basedir = localmodule / 'tests'
+        self.tmpdir = localmodule / '.testfiles'
+        self.tmpdir.mkdir(exist_ok=True)
+        # TODO: Python3.6: Accepts a path-like object.
+        os.chdir(str(self.basedir))
 
     def test_check_for_unsupported_metadata_files(self):
         self.assertTrue(fdroidserver.lint.check_for_unsupported_metadata_files())
 
-        tmptestsdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name,
-                                       dir=self.tmpdir)
-        self.assertFalse(fdroidserver.lint.check_for_unsupported_metadata_files(tmptestsdir + '/'))
-        shutil.copytree(os.path.join(localmodule, 'tests', 'metadata'),
-                        os.path.join(tmptestsdir, 'metadata'),
-                        ignore=shutil.ignore_patterns('apk', 'dump', '*.json'))
-        self.assertFalse(fdroidserver.lint.check_for_unsupported_metadata_files(tmptestsdir + '/'))
-        with open(os.path.join(tmptestsdir, 'metadata', 'org.adaway.json'), 'w') as fp:
-            fp.write('placeholder')
-        self.assertTrue(fdroidserver.lint.check_for_unsupported_metadata_files(tmptestsdir + '/'))
+        with tempfile.TemporaryDirectory(dir=str(self.tmpdir)) as testdir:
+            testdir = Path(testdir)
+            self.assertFalse(
+                fdroidserver.lint.check_for_unsupported_metadata_files(testdir)
+            )
+            # TODO: Python3.6: Accepts a path-like object.
+            shutil.copytree(
+                str(self.basedir / 'metadata'),
+                str(testdir / 'metadata'),
+                ignore=shutil.ignore_patterns('apk', 'dump', '*.json'),
+            )
+            self.assertFalse(
+                fdroidserver.lint.check_for_unsupported_metadata_files(testdir)
+            )
+            (testdir / 'metadata/org.adaway.json').write_text('placeholder')
+            self.assertTrue(
+                fdroidserver.lint.check_for_unsupported_metadata_files(testdir)
+            )
 
     def test_forbidden_html_tags(self):
         config = dict()
@@ -130,7 +136,9 @@ class LintTest(unittest.TestCase):
         fields = {
             'AntiFeatures': {
                 'good': [
-                    ['KnownVuln', ],
+                    [
+                        'KnownVuln',
+                    ],
                    ['NonFreeNet', 'KnownVuln'],
                ],
                'bad': [
@@ -140,7 +148,9 @@ class LintTest(unittest.TestCase):
            },
            'Categories': {
                'good': [
-                    ['Sports & Health', ],
+                    [
+                        'Sports & Health',
+                    ],
                    ['Multimedia', 'Graphics'],
                ],
                'bad': [
@@ -154,7 +164,9 @@ class LintTest(unittest.TestCase):
                ],
                'bad': [
                    [],
-                    ['nope', ],
+                    [
+                        'nope',
+                    ],
                    29,
                ],
            },
@@ -320,8 +332,6 @@ class LintTest(unittest.TestCase):
 
 
 if __name__ == "__main__":
-    os.chdir(os.path.dirname(__file__))
-
    parser = optparse.OptionParser()
    parser.add_option(
        "-v",