mirror of https://github.com/mpv-player/mpv
ci/lint: enable pyflakes and pycodestyle checks
This change required some minor rework to make the code conform to the following:
- Restrict line length to 88 characters
- Use spaces rather than tabs (only affects ci/lint-commit-msg.py)
- Use f-strings rather than % formatting or `str.format()`
parent bee3598ba6, commit e5a383a525
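The hunks that follow apply these conversions mechanically across several Python helper and CI scripts. As a rough, self-contained illustration of the f-string and line-wrapping style used (the variable names here are invented, not taken from the diff; the command string is borrowed from the first hunk):

binary = "/usr/local/bin/mpv"  # invented placeholder value, for illustration only

# before: %-style formatting on a single long line
command_old = "otool -l '%s' | grep -A2 LC_RPATH | grep path" % binary

# after: an f-string, wrapped in parentheses so the statement stays within the limit
command_new = (
    f"otool -l '{binary}' | grep -A2 LC_RPATH | grep path"
)

assert command_old == command_new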
@@ -54,13 +54,17 @@ def get_rapths(objfile):
         # resolve @loader_path
         if line_clean.startswith('@loader_path/'):
             line_clean = line_clean[len('@loader_path/'):]
-            line_clean = os.path.normpath(os.path.join(os.path.dirname(objfile), line_clean))
+            line_clean = os.path.join(os.path.dirname(objfile), line_clean)
+            line_clean = os.path.normpath(line_clean)
         rpaths.append(line_clean)

     return rpaths

 def get_rpaths_dev_tools(binary):
-    command = "otool -l '%s' | grep -A2 LC_RPATH | grep path | grep \"Xcode\\|CommandLineTools\"" % binary
+    command = (
+        f"otool -l '{binary}' | grep -A2 LC_RPATH | grep path | "
+        "grep \"Xcode\\|CommandLineTools\""
+    )
     result = subprocess.check_output(command, shell = True, universal_newlines=True)
     pathRe = re.compile(r"^\s*path (.*) \(offset \d*\)$")
     output = []
@@ -90,26 +94,41 @@ def resolve_lib_path(objfile, lib, rapths):

 def check_vulkan_max_version(version):
     try:
-        subprocess.check_output("pkg-config vulkan --max-version=" + version, shell = True)
+        subprocess.check_output(
+            f"pkg-config vulkan --max-version={version}",
+            shell=True,
+        )
         return True
     except Exception:
         return False

 def get_homebrew_prefix():
-    # set default to standard ARM path, intel path is already in the vulkan loader search array
+    # set default to standard ARM path, intel path is already in the vulkan
+    # loader search array
     result = "/opt/homebrew"
     try:
-        result = subprocess.check_output("brew --prefix", universal_newlines=True, shell=True, stderr=subprocess.DEVNULL).strip()
+        result = subprocess.check_output(
+            "brew --prefix",
+            universal_newlines=True,
+            shell=True,
+            stderr=subprocess.DEVNULL
+        ).strip()
     except Exception:
         pass

     return result

 def install_name_tool_change(old, new, objfile):
-    subprocess.call(["install_name_tool", "-change", old, new, objfile], stderr=subprocess.DEVNULL)
+    subprocess.call(
+        ["install_name_tool", "-change", old, new, objfile],
+        stderr=subprocess.DEVNULL,
+    )

 def install_name_tool_id(name, objfile):
-    subprocess.call(["install_name_tool", "-id", name, objfile], stderr=subprocess.DEVNULL)
+    subprocess.call(
+        ["install_name_tool", "-id", name, objfile],
+        stderr=subprocess.DEVNULL,
+    )

 def install_name_tool_add_rpath(rpath, binary):
     subprocess.call(["install_name_tool", "-add_rpath", rpath, binary])
@@ -173,7 +192,10 @@ def process_swift_libraries(binary):
     swiftLibPath = os.path.join(swiftStdlibTool, '../../lib/swift-5.0/macosx')
     swiftLibPath = os.path.abspath(swiftLibPath)

-    command = [swiftStdlibTool, '--copy', '--platform', 'macosx', '--scan-executable', binary, '--destination', lib_path(binary)]
+    command = [
+        swiftStdlibTool, '--copy', '--platform', 'macosx', '--scan-executable',
+        binary, '--destination', lib_path(binary)
+    ]

     if os.path.exists(swiftLibPath):
         command.extend(['--source-libraries', swiftLibPath])
@@ -221,7 +243,8 @@ def process_vulkan_loader(binary, loaderName, loaderRelativeFolder, libraryNode)

     loaderSystemFile = open(loaderSystemPath, 'r')
     loaderJsonData = json.load(loaderSystemFile)
-    librarySystemPath = os.path.join(loaderSystemFolder, loaderJsonData[libraryNode]["library_path"])
+    libraryPath = loaderJsonData[libraryNode]["library_path"]
+    librarySystemPath = os.path.join(loaderSystemFolder, libraryPath)

     if not os.path.exists(librarySystemPath):
         print(">>> could not find loader library " + librarySystemPath)
@@ -230,14 +253,16 @@ def process_vulkan_loader(binary, loaderName, loaderRelativeFolder, libraryNode)
     print(">>> modifiying and writing loader json " + loaderName)
     loaderBundleFile = open(loaderBundlePath, 'w')
     loaderLibraryName = os.path.basename(librarySystemPath)
-    loaderJsonData[libraryNode]["library_path"] = os.path.join(libraryRelativeFolder, loaderLibraryName)
+    library_path = os.path.join(libraryRelativeFolder, loaderLibraryName)
+    loaderJsonData[libraryNode]["library_path"] = library_path
     json.dump(loaderJsonData, loaderBundleFile, indent=4)

     print(">>> copying loader library " + loaderLibraryName)
     frameworkBundleFolder = os.path.join(loaderBundleFolder, libraryRelativeFolder)
     if not os.path.exists(frameworkBundleFolder):
         os.makedirs(frameworkBundleFolder)
-    shutil.copy(librarySystemPath, os.path.join(frameworkBundleFolder, loaderLibraryName))
+    library_target_path = os.path.join(frameworkBundleFolder, loaderLibraryName)
+    shutil.copy(librarySystemPath, library_target_path)

 def remove_dev_tools_rapths(binary):
     for path in get_rpaths_dev_tools(binary):
@@ -262,7 +287,12 @@ def process(binary):
     print(">> copying and processing vulkan loader")
     process_vulkan_loader(binary, "MoltenVK_icd.json", "vulkan/icd.d", "ICD")
     if check_vulkan_max_version("1.3.261.1"):
-        process_vulkan_loader(binary, "VkLayer_khronos_synchronization2.json", "vulkan/explicit_layer.d", "layer")
+        process_vulkan_loader(
+            binary,
+            "VkLayer_khronos_synchronization2.json",
+            "vulkan/explicit_layer.d",
+            "layer",
+        )

 if __name__ == "__main__":
     process(sys.argv[1])
@@ -31,13 +31,21 @@ if __name__ == "__main__":
     if not mpv_desktop["X-KDE-Protocols"]:
         raise ValueError("Missing X-KDE-Protocols entry in mpv.desktop file")

-    mpv_protocols = check_output([sys.argv[2], "--no-config", "--list-protocols"], encoding="UTF-8")
-    mpv_protocols = set(line.strip(" :/") for line in mpv_protocols.splitlines() if "://" in line)
+    mpv_protocols = check_output(
+        [sys.argv[2], "--no-config", "--list-protocols"],
+        encoding="UTF-8",
+    )
+    mpv_protocols = {
+        line.strip(" :/")
+        for line in mpv_protocols.splitlines()
+        if "://" in line
+    }
     if len(mpv_protocols) == 0:
         raise ValueError("Unable to parse any protocols from mpv '--list-protocols'")

     protocol_list = set(mpv_desktop["X-KDE-Protocols"].strip().split(","))
-    mpv_desktop["X-KDE-Protocols"] = ",".join(sorted(mpv_protocols & protocol_list)) + "\n"
+    compatible_protocols = sorted(mpv_protocols & protocol_list)
+    mpv_desktop["X-KDE-Protocols"] = ",".join(compatible_protocols) + "\n"

     with open(sys.argv[3], "w", encoding="UTF-8") as f:
         f.write("[Desktop Entry]" + "\n")
@@ -29,8 +29,11 @@ def find_macos_sdk():

     # use xcode tools when installed, still necessary for xcode versions <12.0
     try:
-        sdk_version = check_output([xcodebuild, '-sdk', 'macosx', '-version', 'ProductVersion'],
-                                   encoding="UTF-8", stderr=subprocess.DEVNULL)
+        sdk_version = check_output(
+            [xcodebuild, '-sdk', 'macosx', '-version', 'ProductVersion'],
+            encoding="UTF-8",
+            stderr=subprocess.DEVNULL
+        )
     except Exception:
         pass

@@ -27,7 +27,10 @@ def find_swift_lib():

     xcode_path = check_output([xcode_select, "-p"], encoding="UTF-8")

-    swift_lib_dir = os.path.join(xcode_path, "Toolchains/XcodeDefault.xctoolchain/usr/lib/swift/macosx")
+    swift_lib_dir = os.path.join(
+        xcode_path,
+        "Toolchains/XcodeDefault.xctoolchain/usr/lib/swift/macosx"
+    )
     if os.path.isdir(swift_lib_dir):
         return swift_lib_dir

@@ -341,7 +341,7 @@ def generate_C_header(out):
     for el in elementlist:
         if not el.subelements:
             continue
-        printf(out, 'extern const struct ebml_elem_desc {0.structname}_desc;'.format(el))
+        printf(out, f'extern const struct ebml_elem_desc {el.structname}_desc;')

     printf(out)
     printf(out, '#define MAX_EBML_SUBELEMENTS', max(len(el.subelements)
@@ -431,7 +431,8 @@ def parse_one(s, depth, parent, maxlen):
     this_length = len(elid) / 2 + size + length
     if elem is not None:
         if elem.valtype != 'skip':
-            print(" " * depth, '[' + elid + ']', elem.name, 'size:', length, 'value:', end=' ')
+            indent = " " * depth
+            print(f"{indent} [{elid}] {elem.name} size: {length} value:", end=' ')
         if elem.valtype == 'sub':
             print('subelements:')
             while length > 0:
@@ -42,7 +42,8 @@ def sign_bundle(binary_name):
         resolved_dir = os.path.join(bundle_path(binary_name), dir)
         for root, _dirs, files in os.walk(resolved_dir):
             for f in files:
-                subprocess.run(['codesign', '--force', '-s', '-', os.path.join(root, f)])
+                path = os.path.join(root, f)
+                subprocess.run(['codesign', '--force', '-s', '-', path])
     subprocess.run(['codesign', '--force', '-s', '-', bundle_path(binary_name)])

 def bundle_version(src_path):
@@ -70,7 +70,16 @@ def get_event(event, evtype):
         G.events[event] = e
     return G.events[event]

-colors = [(0.0, 0.5, 0.0), (0.0, 0.0, 1.0), (0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.75, 0.75, 0), (0.0, 0.75, 0.75), (0.75, 0, 0.75)]
+colors = [
+    (0.0, 0.5, 0.0),
+    (0.0, 0.0, 1.0),
+    (0.0, 0.0, 0.0),
+    (1.0, 0.0, 0.0),
+    (0.75, 0.75, 0),
+    (0.0, 0.75, 0.75),
+    (0.75, 0, 0.75)
+]

 def mkColor(t):
     return pg.mkColor(int(t[0] * 255), int(t[1] * 255), int(t[2] * 255))

@@ -7,106 +7,110 @@ import re
 from typing import Dict, Tuple, Callable, Optional

 def call(cmd) -> str:
-	sys.stdout.flush()
-	ret = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, text=True)
-	return ret.stdout
+    sys.stdout.flush()
+    ret = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, text=True)
+    return ret.stdout

 lint_rules: Dict[str, Tuple[Callable, str]] = {}

 # A lint rule should return True if everything is okay
 def lint_rule(description: str):
-	def f(func):
-		assert func.__name__ not in lint_rules.keys()
-		lint_rules[func.__name__] = (func, description)
-	return f
+    def f(func):
+        assert func.__name__ not in lint_rules.keys()
+        lint_rules[func.__name__] = (func, description)
+    return f

 def get_commit_range() -> Optional[str]:
-	if len(sys.argv) > 1:
-		return sys.argv[1]
-	# https://github.com/actions/runner/issues/342#issuecomment-590670059
-	event_name = os.environ["GITHUB_EVENT_NAME"]
-	with open(os.environ["GITHUB_EVENT_PATH"], "rb") as f:
-		event = json.load(f)
-	if event_name == "push":
-		if event["created"] or event["forced"]:
-			print("Skipping logic on branch creation or force-push")
-			return None
-		return event["before"] + "..." + event["after"]
-	elif event_name == "pull_request":
-		return event["pull_request"]["base"]["sha"] + ".." + event["pull_request"]["head"]["sha"]
-	return None
+    if len(sys.argv) > 1:
+        return sys.argv[1]
+    # https://github.com/actions/runner/issues/342#issuecomment-590670059
+    event_name = os.environ["GITHUB_EVENT_NAME"]
+    with open(os.environ["GITHUB_EVENT_PATH"], "rb") as f:
+        event = json.load(f)
+    if event_name == "push":
+        if event["created"] or event["forced"]:
+            print("Skipping logic on branch creation or force-push")
+            return None
+        return event["before"] + "..." + event["after"]
+    elif event_name == "pull_request":
+        base = event["pull_request"]["base"]["sha"]
+        head = event["pull_request"]["head"]["sha"]
+        return f"{base}..{head}"
+    return None

 def do_lint(commit_range: str) -> bool:
-	commits = call(["git", "log", "--pretty=format:%H %s", commit_range]).splitlines()
-	print(f"Linting {len(commits)} commit(s):")
-	any_failed = False
-	for commit in commits:
-		sha, _, _ = commit.partition(' ')
-		body = call(["git", "show", "-s", "--format=%B", sha]).splitlines()
-		failed = []
-		if len(body) == 0:
-			failed.append("* Commit message must not be empty")
-		else:
-			for k, v in lint_rules.items():
-				if not v[0](body):
-					failed.append(f"* {v[1]} [{k}]")
-		if failed:
-			any_failed = True
-			print("-" * 40)
-			sys.stdout.flush()
-			subprocess.run(["git", "-P", "show", "-s", sha])
-			print("\nhas the following issues:")
-			print("\n".join(failed))
-			print("-" * 40)
-	return any_failed
+    commits = call(["git", "log", "--pretty=format:%H %s", commit_range]).splitlines()
+    print(f"Linting {len(commits)} commit(s):")
+    any_failed = False
+    for commit in commits:
+        sha, _, _ = commit.partition(' ')
+        body = call(["git", "show", "-s", "--format=%B", sha]).splitlines()
+        failed = []
+        if len(body) == 0:
+            failed.append("* Commit message must not be empty")
+        else:
+            for k, v in lint_rules.items():
+                if not v[0](body):
+                    failed.append(f"* {v[1]} [{k}]")
+        if failed:
+            any_failed = True
+            print("-" * 40)
+            sys.stdout.flush()
+            subprocess.run(["git", "-P", "show", "-s", sha])
+            print("\nhas the following issues:")
+            print("\n".join(failed))
+            print("-" * 40)

 ################################################################################

-NO_PREFIX_WHITELIST = r"^Revert \"(.*)\"|^Reapply \"(.*)\"|^Release [0-9]|^Update MPV_VERSION$"
+NO_PREFIX_WHITELIST = \
+    r"^Revert \"(.*)\"|^Reapply \"(.*)\"|^Release [0-9]|^Update MPV_VERSION$"

 @lint_rule("Subject line must contain a prefix identifying the sub system")
 def subsystem_prefix(body):
-	return (re.search(NO_PREFIX_WHITELIST, body[0]) or
-	        re.search(r"^[\w/\.{},-]+: ", body[0]))
+    return (re.search(NO_PREFIX_WHITELIST, body[0]) or
+            re.search(r"^[\w/\.{},-]+: ", body[0]))

 @lint_rule("First word after : must be lower case")
 def description_lowercase(body):
-	# Allow all caps for acronyms and options with --
-	return (re.search(NO_PREFIX_WHITELIST, body[0]) or
-	        re.search(r": (?:[A-Z]{2,} |--[a-z]|[a-z0-9])", body[0]))
+    # Allow all caps for acronyms and options with --
+    return (re.search(NO_PREFIX_WHITELIST, body[0]) or
+            re.search(r": (?:[A-Z]{2,} |--[a-z]|[a-z0-9])", body[0]))

 @lint_rule("Subject line must not end with a full stop")
 def no_dot(body):
-	return not body[0].rstrip().endswith('.')
+    return not body[0].rstrip().endswith('.')

 @lint_rule("There must be an empty line between subject and extended description")
 def empty_line(body):
-	return len(body) == 1 or body[1].strip() == ""
+    return len(body) == 1 or body[1].strip() == ""

 # been seeing this one all over github lately, must be the webshits
 @lint_rule("Do not use 'conventional commits' style")
 def no_cc(body):
-	return not re.search(r"(?i)^(feat|fix|chore|refactor)[!:(]", body[0])
+    return not re.search(r"(?i)^(feat|fix|chore|refactor)[!:(]", body[0])

 @lint_rule("History must be linear, no merge commits")
 def no_merge(body):
-	return not body[0].startswith("Merge ")
+    return not body[0].startswith("Merge ")

 @lint_rule("Subject line should be shorter than 72 characters")
 def line_too_long(body):
-	revert = re.search(r"^Revert \"(.*)\"|^Reapply \"(.*)\"", body[0])
-	return revert or len(body[0]) <= 72
+    revert = re.search(r"^Revert \"(.*)\"|^Reapply \"(.*)\"", body[0])
+    return revert or len(body[0]) <= 72

-@lint_rule("Prefix should not include file extension (use `vo_gpu: ...` not `vo_gpu.c: ...`)")
+@lint_rule(
+    "Prefix should not include file extension (use `vo_gpu: ...` not `vo_gpu.c: ...`)"
+)
 def no_file_exts(body):
-	return not re.search(r"[a-z0-9]\.([chm]|cpp|swift|py): ", body[0])
+    return not re.search(r"[a-z0-9]\.([chm]|cpp|swift|py): ", body[0])

 ################################################################################

 if __name__ == "__main__":
-	commit_range = get_commit_range()
-	if commit_range is None:
-		exit(0)
-	print("Commit range:", commit_range)
-	any_failed = do_lint(commit_range)
-	exit(1 if any_failed else 0)
+    commit_range = get_commit_range()
+    if commit_range is None:
+        exit(0)
+    print("Commit range:", commit_range)
+    any_failed = do_lint(commit_range)
+    exit(1 if any_failed else 0)
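As an aside to the re-indented commit-message lint script above (ci/lint-commit-msg.py): its lint_rule registry can be exercised in isolation. A minimal sketch, assuming a stripped-down copy of the pattern and an invented commit subject; nothing below is part of the commit itself:

from typing import Callable, Dict, Tuple

lint_rules: Dict[str, Tuple[Callable, str]] = {}

# Same registration pattern as in the hunk above: each check is stored under
# its function name together with a human-readable description.
def lint_rule(description: str):
    def f(func):
        lint_rules[func.__name__] = (func, description)
    return f

@lint_rule("Subject line must not end with a full stop")
def no_dot(body):
    return not body[0].rstrip().endswith('.')

# Invented commit subject, used only to show how the registry is consumed.
body = ["player: fix a crash."]
failures = [desc for check, desc in lint_rules.values() if not check(body)]
print(failures)  # ['Subject line must not end with a full stop']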
@@ -0,0 +1,8 @@
+[tool.ruff]
+line-length = 100
+
+[tool.ruff.lint]
+select = [
+    "F", # pyflakes
+    "E", "W", # pycodestyle
+]
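The new configuration file above is what enables the checks named in the subject line: ruff reads the [tool.ruff] table from pyproject.toml on its own, selecting pyflakes (F) and pycodestyle (E, W) rules. A minimal sketch of reproducing the same checks locally; the use of subprocess and the working-directory assumption are mine, and ruff must already be installed:

import subprocess

# Run from the repository root so ruff picks up pyproject.toml automatically.
subprocess.run(["ruff", "check", "."], check=False)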