Qual: Backport pre-commit to 17.0 (#27948)

Backport the pre-commit tooling to bring extra checks to the older versions,
so that issues are caught and fixed earlier, before they are merged into the
newer versions.

Other PRs will be created for backporting to 18.0 and 19.0.
The PRs should be accepted in reverse order:
- develop;
- 19.0;
- 18.0;
- 17.0.

That order should avoid merge conflicts and ensure that the options (and the
ignored messages or steps) are adapted to each version branch.
MDW, 2024-02-03 00:10:49 +01:00, committed by GitHub
parent a237f5dc54
commit ed3bfb152a
8 changed files with 3394 additions and 0 deletions

.github/logToCs.py (vendored executable file, 588 additions)

@@ -0,0 +1,588 @@
#!/usr/bin/env python3
# pylint: disable=invalid-name
"""
Convert a log to CheckStyle format.
Url: https://github.com/mdeweerd/LogToCheckStyle
The log can then be used for generating annotations in a github action.
Note: this script is very young and "quick and dirty".
Patterns can be added to "PATTERNS" to match more messages.
# Examples
Assumes that logToCs.py is available as .github/logToCs.py.
## Example 1:
```yaml
- run: |
pre-commit run --all-files | tee pre-commit.log
.github/logToCs.py pre-commit.log pre-commit.xml
- uses: staabm/annotate-pull-request-from-checkstyle-action@v1
with:
files: pre-commit.xml
notices-as-warnings: true # optional
```
## Example 2:
```yaml
- run: |
pre-commit run --all-files | tee pre-commit.log
- name: Add results to PR
if: ${{ always() }}
run: |
.github/logToCs.py pre-commit.log | cs2pr
```
Author(s):
- https://github.com/mdeweerd
License: MIT License
"""
import argparse
import datetime as dt
import json
import os
import re
import sys
import xml.etree.ElementTree as ET # nosec
def remove_prefix(string, prefix):
"""
Remove prefix from string
Provided for backward compatibility.
"""
if prefix and string.startswith(prefix):
return string[len(prefix) :]
return string
def convert_notices_to_checkstyle(notices, root_path=None):
"""
Convert annotation list to CheckStyle xml string
"""
root = ET.Element("checkstyle")
for fields in notices:
add_error_entry(root, **fields, root_path=root_path)
return ET.tostring(root, encoding="utf_8").decode("utf_8")
def convert_lines_to_notices(lines):
"""
Convert provided message to CheckStyle format.
"""
notices = []
for line in lines:
fields = parse_message(line)
if fields:
notices.append(fields)
return notices
def convert_text_to_notices(text):
"""
Convert provided message to CheckStyle format.
"""
return parse_file(text)
# Initial version for Checkrun from:
# https://github.com/tayfun/flake8-your-pr/blob/50a175cde4dd26a656734c5b64ba1e5bb27151cb/src/main.py#L7C1-L123C36
# MIT Licence
class CheckRun:
"""
Represents the check run
"""
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", None)
GITHUB_EVENT_PATH = os.environ.get("GITHUB_EVENT_PATH", None)
URI = "https://api.github.com"
API_VERSION = "2022-11-28"
ACCEPT_HEADER_VALUE = "application/vnd.github+json"
AUTH_HEADER_VALUE = f"Bearer {GITHUB_TOKEN}"
# This is the maximum number of annotations the GitHub API accepts in one go.
MAX_ANNOTATIONS = 50
def __init__(self):
"""
Initialise Check Run object with information from checkrun
"""
self.read_event_file()
self.read_meta_data()
def read_event_file(self):
"""
Read the event file to get the event information later.
"""
if self.GITHUB_EVENT_PATH is None:
raise ValueError("Not running in github workflow")
with open(self.GITHUB_EVENT_PATH, encoding="utf_8") as event_file:
self.event = json.loads(event_file.read())
def read_meta_data(self):
"""
Get meta data from event information
"""
self.repo_full_name = self.event["repository"]["full_name"]
pull_request = self.event.get("pull_request")
print("%r", self.event)
if pull_request:
self.head_sha = pull_request["head"]["sha"]
else:
print("%r", self.event)
check_suite = self.event.get("check_suite", None)
if check_suite is not None:
self.head_sha = check_suite["pull_requests"][0]["base"]["sha"]
else:
self.head_sha = None # Can't annotate?
def submit( # pylint: disable=too-many-arguments
self,
notices,
title=None,
summary=None,
text=None,
conclusion=None,
):
"""
Submit annotations to github
See:
https://docs.github.com/en/rest/checks/runs?apiVersion=2022-11-28
#update-a-check-run
:param conclusion: success, failure
"""
# pylint: disable=import-outside-toplevel
import requests # Import here to not impose presence of module
if self.head_sha is None:
return
output = {
"annotations": notices[: CheckRun.MAX_ANNOTATIONS],
}
if title is not None:
output["title"] = title
if summary is not None:
output["summary"] = summary
if text is not None:
output["text"] = text
if conclusion is None:
# action_required, cancelled, failure, neutral, success
# skipped, stale, timed_out
if bool(notices):
conclusion = "failure"
else:
conclusion = "success"
payload = {
"name": "log-to-pr-annotation",
"head_sha": self.head_sha,
"status": "completed", # queued, in_progress, completed
"conclusion": conclusion,
# "started_at": dt.datetime.now(dt.timezone.utc).isoformat(),
"completed_at": dt.datetime.now(dt.timezone.utc).isoformat(),
"output": output,
}
# Create the check-run
response = requests.post(
f"{self.URI}/repos/{self.repo_full_name}/check-runs",
headers={
"Accept": self.ACCEPT_HEADER_VALUE,
"Authorization": self.AUTH_HEADER_VALUE,
"X-GitHub-Api-Version": self.API_VERSION,
},
json=payload,
timeout=30,
)
print(response.content)
response.raise_for_status()
ANY_REGEX = r".*?"
FILE_REGEX = r"\s*(?P<file_name>\S.*?)\s*?"
FILEGROUP_REGEX = r"\s*(?P<file_group>\S.*?)\s*?"
EOL_REGEX = r"[\r\n]"
LINE_REGEX = r"\s*(?P<line>\d+?)\s*?"
COLUMN_REGEX = r"\s*(?P<column>\d+?)\s*?"
SEVERITY_REGEX = r"\s*(?P<severity>error|warning|notice|style|info)\s*?"
MSG_REGEX = r"\s*(?P<message>.+?)\s*?"
MULTILINE_MSG_REGEX = r"\s*(?P<message>(?:.|.[\r\n])+)"
# cpplint confidence index
CONFIDENCE_REGEX = r"\s*\[(?P<confidence>\d+)\]\s*?"
# List of message patterns, add more specific patterns earlier in the list
# Creating patterns by using constants makes them easier to define and read.
PATTERNS = [
# beautysh
# File ftp.sh: error: "esac" before "case" in line 90.
re.compile(
f"^File {FILE_REGEX}:{SEVERITY_REGEX}:"
f" {MSG_REGEX} in line {LINE_REGEX}.$"
),
# beautysh
# File socks4echo.sh: error: indent/outdent mismatch: -2.
re.compile(f"^File {FILE_REGEX}:{SEVERITY_REGEX}: {MSG_REGEX}$"),
# yamllint
# ##[group].pre-commit-config.yaml
# ##[error]97:14 [trailing-spaces] trailing spaces
# ##[endgroup]
re.compile(rf"^##\[group\]{FILEGROUP_REGEX}$"), # Start file group
re.compile(
rf"^##\[{SEVERITY_REGEX}\]{LINE_REGEX}:{COLUMN_REGEX}{MSG_REGEX}$"
), # Msg
re.compile(r"^##(?P<file_endgroup>\[endgroup\])$"), # End file group
# File socks4echo.sh: error: indent/outdent mismatch: -2.
re.compile(f"^File {FILE_REGEX}:{SEVERITY_REGEX}: {MSG_REGEX}$"),
# Emacs style
# path/to/file:845:5: error - Expected 1 space after closing brace
re.compile(
rf"^{FILE_REGEX}:{LINE_REGEX}:{COLUMN_REGEX}:{SEVERITY_REGEX}"
rf"-?\s{MSG_REGEX}$"
),
# ESLint (JavaScript Linter), RoboCop, shellcheck
# path/to/file.js:10:2: Some linting issue
# path/to/file.rb:10:5: Style/Indentation: Incorrect indentation detected
# path/to/script.sh:10:1: SC2034: Some shell script issue
re.compile(f"^{FILE_REGEX}:{LINE_REGEX}:{COLUMN_REGEX}: {MSG_REGEX}$"),
# Cpplint default output:
# '%s:%s: %s [%s] [%d]\n'
# % (filename, linenum, message, category, confidence)
re.compile(f"^{FILE_REGEX}:{LINE_REGEX}:{MSG_REGEX}{CONFIDENCE_REGEX}$"),
# MSVC
# file.cpp(10): error C1234: Some error message
re.compile(
f"^{FILE_REGEX}\\({LINE_REGEX}\\):{SEVERITY_REGEX}{MSG_REGEX}$"
),
# Java compiler
# File.java:10: error: Some error message
re.compile(f"^{FILE_REGEX}:{LINE_REGEX}:{SEVERITY_REGEX}:{MSG_REGEX}$"),
# Python
# File ".../logToCs.py", line 90 (note: code line follows)
re.compile(f'^File "{FILE_REGEX}", line {LINE_REGEX}$'),
# Pylint, others
# path/to/file.py:10: [C0111] Missing docstring
# others
re.compile(f"^{FILE_REGEX}:{LINE_REGEX}: {MSG_REGEX}$"),
# Shellcheck:
# In script.sh line 76:
re.compile(
f"^In {FILE_REGEX} line {LINE_REGEX}:{EOL_REGEX}?"
f"({MULTILINE_MSG_REGEX})?{EOL_REGEX}{EOL_REGEX}"
),
# eslint:
# /path/to/filename
# 14:5 error Unexpected trailing comma comma-dangle
re.compile(
f"^{FILE_REGEX}{EOL_REGEX}"
rf"\s+{LINE_REGEX}:{COLUMN_REGEX}\s+{SEVERITY_REGEX}\s+{MSG_REGEX}$"
),
]
# Exceptionally, some regexes match messages that are not errors.
# This pattern matches those exceptions.
EXCLUDE_MSG_PATTERN = re.compile(
r"^("
r"Placeholder pattern" # To remove on first message pattern
r")"
)
# Exceptionally, some regexes match messages that are not errors.
# This pattern matches those exceptions.
EXCLUDE_FILE_PATTERN = re.compile(
r"^("
# Codespell: (appears as a file name):
r"Used config files\b"
r")"
)
# Severities available in CodeSniffer report format
SEVERITY_NOTICE = "notice"
SEVERITY_WARNING = "warning"
SEVERITY_ERROR = "error"
def strip_ansi(text: str):
"""
Strip ANSI escape sequences from string (colors, etc)
"""
return re.sub(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])", "", text)
def parse_file(text):
"""
Parse all messages in a file
Returns the fields in a dict.
"""
# pylint: disable=too-many-branches,too-many-statements
# regex required to allow same group names
try:
import regex # pylint: disable=import-outside-toplevel
except ImportError as exc:
raise ImportError(
"The 'parse_file' function requires 'python -m pip install regex'"
) from exc
patterns = [pattern.pattern for pattern in PATTERNS]
# patterns = [PATTERNS[0].pattern]
file_group = None # The file name for the group (if any)
full_regex = "(?:(?:" + (")|(?:".join(patterns)) + "))"
results = []
for fields in regex.finditer(
full_regex, strip_ansi(text), regex.MULTILINE | regex.IGNORECASE
):
if not fields:
continue
result = fields.groupdict()
if len(result) == 0:
continue
severity = result.get("severity", None)
file_name = result.get("file_name", None)
confidence = result.pop("confidence", None)
new_file_group = result.pop("file_group", None)
file_endgroup = result.pop("file_endgroup", None)
message = result.get("message", None)
if new_file_group is not None:
# Start of file_group, just store file
file_group = new_file_group
continue
if file_endgroup is not None:
file_group = None
continue
if file_name is None:
if file_group is not None:
file_name = file_group
result["file_name"] = file_name
else:
# No filename, skip
continue
else:
if EXCLUDE_FILE_PATTERN.search(file_name):
# This file_name is excluded
continue
if message is not None:
if EXCLUDE_MSG_PATTERN.search(message):
# This message is excluded
continue
if confidence is not None:
# Convert confidence level of cpplint
# to warning, etc.
confidence = int(confidence)
if confidence <= 1:
severity = SEVERITY_NOTICE
elif confidence >= 5:
severity = SEVERITY_ERROR
else:
severity = SEVERITY_WARNING
if severity is None:
severity = SEVERITY_ERROR
else:
severity = severity.lower()
if severity in ["info", "style"]:
severity = SEVERITY_NOTICE
result["severity"] = severity
results.append(result)
return results
def parse_message(message):
"""
Parse message until it matches a pattern.
Returns the fields in a dict.
"""
for pattern in PATTERNS:
# The second argument of a compiled pattern's match() is 'pos', not a flag.
fields = pattern.match(message)
if not fields:
continue
result = fields.groupdict()
if len(result) == 0:
continue
if "confidence" in result:
# Convert confidence level of cpplint
# to warning, etc.
confidence = int(result["confidence"])
del result["confidence"]
if confidence <= 1:
severity = SEVERITY_NOTICE
elif confidence >= 5:
severity = SEVERITY_ERROR
else:
severity = SEVERITY_WARNING
result["severity"] = severity
if "severity" not in result:
result["severity"] = SEVERITY_ERROR
else:
result["severity"] = result["severity"].lower()
if result["severity"] in ["info", "style"]:
result["severity"] = SEVERITY_NOTICE
return result
# Nothing matched
return None
def add_error_entry( # pylint: disable=too-many-arguments
root,
severity,
file_name,
line=None,
column=None,
message=None,
source=None,
root_path=None,
):
"""
Add error information to the CheckStyle output being created.
"""
file_element = find_or_create_file_element(
root, file_name, root_path=root_path
)
error_element = ET.SubElement(file_element, "error")
error_element.set("severity", severity)
if line:
error_element.set("line", line)
if column:
error_element.set("column", column)
if message:
error_element.set("message", message)
if source:
# To verify if this is a valid attribute
error_element.set("source", source)
def find_or_create_file_element(root, file_name: str, root_path=None):
"""
Find/create file element in XML document tree.
"""
if root_path is not None:
file_name = remove_prefix(file_name, root_path)
for file_element in root.findall("file"):
if file_element.get("name") == file_name:
return file_element
file_element = ET.SubElement(root, "file")
file_element.set("name", file_name)
return file_element
def main():
"""
Parse the script arguments and get the conversion done.
"""
parser = argparse.ArgumentParser(
description="Convert messages to Checkstyle XML format."
)
parser.add_argument(
"input", help="Input file. Use '-' for stdin.", nargs="?", default="-"
)
parser.add_argument(
"output",
help="Output file. Use '-' for stdout.",
nargs="?",
default="-",
)
parser.add_argument(
"-i",
"--in",
dest="input_named",
help="Input filename. Overrides positional input.",
)
parser.add_argument(
"-o",
"--out",
dest="output_named",
help="Output filename. Overrides positional output.",
)
parser.add_argument(
"--root",
metavar="ROOT_PATH",
help="Root directory to remove from file paths."
" Defaults to working directory.",
default=os.getcwd(),
)
parser.add_argument(
"--github-annotate",
action=argparse.BooleanOptionalAction,
help="Annotate when in Github workflow.",
# Currently disabled,
# Future: (os.environ.get("GITHUB_EVENT_PATH", None) is not None),
default=False,
)
args = parser.parse_args()
if args.input == "-" and args.input_named:
with open(
args.input_named, encoding="utf_8", errors="surrogateescape"
) as input_file:
text = input_file.read()
elif args.input != "-":
with open(
args.input, encoding="utf_8", errors="surrogateescape"
) as input_file:
text = input_file.read()
else:
text = sys.stdin.read()
root_path = os.path.join(args.root, "")
try:
notices = convert_text_to_notices(text)
except ImportError:
notices = convert_lines_to_notices(re.split(r"[\r\n]+", text))
checkstyle_xml = convert_notices_to_checkstyle(
notices, root_path=root_path
)
if args.output == "-" and args.output_named:
with open(args.output_named, "w", encoding="utf_8") as output_file:
output_file.write(checkstyle_xml)
elif args.output != "-":
with open(args.output, "w", encoding="utf_8") as output_file:
output_file.write(checkstyle_xml)
else:
print(checkstyle_xml)
if args.github_annotate:
checkrun = CheckRun()
checkrun.submit(notices)
if __name__ == "__main__":
main()

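The script above is written to be used from the command line, but its helpers can also be reused directly. Below is a minimal usage sketch (not part of the commit) assuming the module is loaded from `.github/logToCs.py` and that a `pre-commit.log` capture already exists in the working directory; it mirrors what `main()` does, minus argument parsing and the optional GitHub check-run submission.

```python
# Hedged sketch: load .github/logToCs.py as a module and convert a captured
# pre-commit log to CheckStyle XML using the helpers defined above.
import importlib.util
import os
import re

spec = importlib.util.spec_from_file_location("logtocs", ".github/logToCs.py")
logtocs = importlib.util.module_from_spec(spec)
spec.loader.exec_module(logtocs)  # __name__ != "__main__", so main() is not run

with open("pre-commit.log", encoding="utf_8", errors="surrogateescape") as fh:
    text = fh.read()

try:
    # Whole-text parsing; requires the third-party 'regex' module.
    notices = logtocs.convert_text_to_notices(text)
except ImportError:
    # Fallback: line-by-line parsing with the standard 're' module only.
    notices = logtocs.convert_lines_to_notices(re.split(r"[\r\n]+", text))

checkstyle_xml = logtocs.convert_notices_to_checkstyle(
    notices, root_path=os.path.join(os.getcwd(), "")
)
with open("pre-commit.xml", "w", encoding="utf_8") as out:
    out.write(checkstyle_xml)
```
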
.github/workflows/pre-commit.yml (vendored file, 111 additions)

@@ -0,0 +1,111 @@
---
name: pre-commit
on:
pull_request:
push:
jobs:
pre-commit:
runs-on: ubuntu-latest
env:
LOG_TO_CS: .github/logToCs.py
RAW_LOG: pre-commit.log
CS_XML: pre-commit.xml
steps:
- name: Install required tools
run: sudo apt-get update && sudo apt-get install cppcheck
if: false
# The next step uses the GitHub API because there is no clone yet.
# This is faster for a big repo.
- name: Get all changed php files (if PR)
id: changed-php
uses: tj-actions/changed-files@v42
if: github.event_name == 'pull_request'
with:
files: |
**.php
# Checkout git sources to analyze
- uses: actions/checkout@v4
# Action setup-python needs a requirements.txt or pyproject.toml
# This ensures one of them exists.
- name: Create requirements.txt if no requirements.txt or pyproject.toml
run: |-
[ -r requirements.txt ] || [ -r pyproject.toml ] || touch requirements.txt
# Install python and pre-commit tool
- uses: actions/setup-python@v5
with:
cache: pip
python-version: '3.11'
- run: python -m pip install pre-commit regex
# Restore previous cache of precommit
- uses: actions/cache/restore@v4
with:
path: ~/.cache/pre-commit/
key: pre-commit-4|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }}
# Run all the pre-commit hooks (defined in .pre-commit-config.yaml).
# We can force exclusion of some of them here.
- name: Run pre-commit hooks
env:
# SKIP is used by pre-commit to not execute certain hooks
SKIP: no-commit-to-branch,php-cs,php-cbf,trailing-whitespace,end-of-file-fixer,check-json,check-executables-have-shebangs,check-shebang-scripts-are-executable,beautysh,yamllint,shellcheck
run: |
set -o pipefail
pre-commit gc
pre-commit run --show-diff-on-failure --color=always --all-files | tee ${RAW_LOG}
# The next uses git, which is slow for a big repo.
# - name: Get all changed php files (if PR)
# id: changed-php
# uses: tj-actions/changed-files@v42
# if: github.event_name == 'pull_request'
# with:
# files: |
# **.php
- name: Setup PHPCS
uses: shivammathur/setup-php@v2
if: steps.changed-php.outputs.any_changed == 'true'
with:
php-version: 8.1
coverage: none # disable xdebug, pcov
tools: phpcs
- name: Run some pre-commit hooks on selected changed files only
if: steps.changed-php.outputs.any_changed == 'true'
env:
ALL_CHANGED_FILES: ${{ steps.changed-php.outputs.all_changed_files }}
run: |
set -o pipefail
pre-commit run php-cs --files ${ALL_CHANGED_FILES} | tee -a ${RAW_LOG}
# On error, convert the raw log to the CheckStyle format
- name: Convert Raw Log to CheckStyle format
if: ${{ failure() }}
run: |
python ${LOG_TO_CS} ${RAW_LOG} ${CS_XML}
# Annotate the git sources with the log messages
- name: Annotate Source Code with Messages
uses: staabm/annotate-pull-request-from-checkstyle-action@v1
if: ${{ failure() }}
with:
files: ${{ env.CS_XML }}
notices-as-warnings: true # optional
prepend-filename: true # optional
# Save the precommit cache
- uses: actions/cache/save@v4
if: ${{ ! cancelled() }}
with:
path: ~/.cache/pre-commit/
key: pre-commit-4|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml')
}}
# Upload result log files of precommit into the Artifact shared store
- name: Provide log as artifact
uses: actions/upload-artifact@v4
if: ${{ ! cancelled() }}
with:
name: precommit-logs
path: |
${{ env.RAW_LOG }}
${{ env.CS_XML }}
retention-days: 2

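The workflow above limits the `php-cs` hook to the PHP files touched by the pull request. A rough local approximation of that step is sketched below; the target branch `origin/17.0` and the `pre-commit.log` file name are assumptions, and in CI the changed-file list comes from tj-actions/changed-files rather than a local `git diff`.

```python
# Hedged local sketch of the "run php-cs on changed PHP files only" step.
import subprocess

BASE = "origin/17.0"  # assumption: the branch targeted by the pull request

diff = subprocess.run(
    ["git", "diff", "--name-only", "--diff-filter=d", BASE, "--", "*.php"],
    check=True, capture_output=True, text=True,
)
changed_php = [path for path in diff.stdout.splitlines() if path]

if changed_php:
    # Run the single hook on the changed files and append its output to the
    # raw log, as the workflow does with `tee -a`.
    result = subprocess.run(
        ["pre-commit", "run", "php-cs", "--files", *changed_php],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True,
    )
    with open("pre-commit.log", "a", encoding="utf-8") as log:
        log.write(result.stdout)
    raise SystemExit(result.returncode)
```
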
.pre-commit-config.yaml (new file, 151 additions)

@@ -0,0 +1,151 @@
---
exclude: (?x)^( htdocs/includes/ckeditor/.* )
repos:
# Several miscellaneous checks and fixes (on yaml files, end-of-file fix)
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: no-commit-to-branch
args: [--branch, develop, --pattern, \d+.0]
- id: check-yaml
args: [--unsafe]
- id: check-json
- id: mixed-line-ending
exclude: (?x)^(htdocs/includes/tecnickcom/tcpdf/fonts/.*)$
- id: trailing-whitespace
exclude_types: [markdown]
- id: end-of-file-fixer
- id: check-merge-conflict
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
exclude: (?x)^( dev/tools/dolibarr-postgres2mysql.php |test/other/test_serialize.php
|test/phpunit/textutf8.txt |test/phpunit/textiso.txt |htdocs/includes/.*
|htdocs/modulebuilder/template/.* |build/debian/dolibarr.postrm |build/debian/dolibarr.postinst
|build/debian/dolibarr.config )$
- id: fix-byte-order-marker
- id: check-case-conflict
# Beautify shell scripts
- repo: https://github.com/lovesegfault/beautysh.git
rev: v6.2.1
hooks:
- id: beautysh
exclude: (?x)^(dev/setup/git/hooks/pre-commit)$
args: [--tab]
# Run local script
#
# For instance to update the license in edited files, you could add to local.sh:
#
# ```shell
# #!/bin/bash
# MYDIR=$(dirname "$0")
# CHANGED_INTERNALS=$(git diff --name-only | grep -v includes)
# "$MYDIR/dev/tools/updatelicense.php" $CHANGED_INTERNALS
# ```
- repo: local
hooks:
- id: local-precommit-script
name: Run local script before commit if it exists
language: system
entry: bash -c '[ ! -x local.sh ] || ./local.sh'
pass_filenames: false
# Check PHP syntax
- repo: https://github.com/mdeweerd/pre-commit-php
rev: v1.6.3
hooks:
- id: php-cbf
files: \.(php)$
args: [--standard=dev/setup/codesniffer/ruleset.xml]
- id: php-cs
files: \.(php)$
args: [--standard=dev/setup/codesniffer/ruleset.xml, --report=emacs]
- id: php-lint
- id: php-stan
stages: [manual]
files: \.(php)$
# Prettier (format code, only for non common files)
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.3
hooks:
- id: prettier
stages: [manual]
exclude: (?x)^( .*\.(phar |min\.css |lock) |htdocs/(includes|theme/common)/.*
)$
exclude_types:
- php
- executable
- binary
- shell
- javascript
- markdown
- html
- less
- plain-text
- scss
- css
- yaml
# Check format of yaml files
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.33.0
hooks:
- id: yamllint
args:
- --no-warnings
- -d
- '{extends: relaxed, rules: {line-length: {max: 120}}}'
# Execute codespell to fix typos (the codespell setup lives in dev/tools/codespell/)
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
# Due to a current limitation of codespell's configuration-file support,
# the two dictionaries can only be specified on the CLI.
# You can update the contents of the exclude-file codespell-lines-ignore with the script
# dev/tools/codespell/addCodespellIgnores.sh
args:
- -D
- '-'
- -D
- dev/tools/codespell/codespell-dict.txt
- -I
- dev/tools/codespell/codespell-ignore.txt
- -x
- dev/tools/codespell/codespell-lines-ignore.txt
- --uri-ignore-words-list
- ned
exclude_types: [image]
exclude: (?x)^(.phan/stubs/.*)$
additional_dependencies: [tomli]
- alias: codespell-lang-en_US
# Only for translations with specialised exceptions
# -D contains predefined conversion dictionaries
# -L is to ignore some words
id: codespell
files: ^htdocs/langs/en_US/.*$
args:
- -D
- '-'
- -D
- dev/tools/codespell/codespell-dict.txt
- -L
- informations,medias,uptodate,reenable,crypted,developpers
- -L
- creat,unitl,alltime,datas,referers
- -I
- dev/tools/codespell/codespell-ignore.txt
- -x
- dev/tools/codespell/codespell-lines-ignore.txt
- --uri-ignore-words-list
- ned
# Check some shell scripts
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.9.0.6
hooks:
- id: shellcheck
args: [-W, '100']

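Note that `php-stan` and `prettier` above are declared with `stages: [manual]`, so a plain `pre-commit run --all-files` skips them. A small sketch (not part of the commit, assuming pre-commit is installed locally) of invoking those hooks on demand:

```python
# Hedged sketch: run the manual-stage hooks from .pre-commit-config.yaml.
import subprocess

for hook in ("php-stan", "prettier"):  # hooks declared with stages: [manual]
    subprocess.run(
        ["pre-commit", "run", hook, "--hook-stage", "manual", "--all-files"],
        check=False,  # keep going even if a hook reports findings
    )
```
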
dev/tools/codespell/addCodespellIgnores.sh (new file, 68 additions)

@@ -0,0 +1,68 @@
#!/bin/bash
# Copyright (C) 2024 MDW <mdeweerd@users.noreply.github.com>
#
# Script to add codespell exceptions to the ignores lines file.
#
# The file is named '...-lines-ignore' to make TAB expansion on the cli easier.
#
# The line in the ignore file must match the line in the source
# exactly.
#
# To clean up or create the ignored lines file, just do
# ```shell
# echo > dev/tools/codespell/codespell-lines-ignore.txt
# ```
# and then execute this script.
#
# author: https://github.com/mdeweerd
#
# :warning:
#
# This script only works properly if codespell is installed for your CLI.
# As the configuration is in pyproject.toml, you also need tomli.
#
# ```shell
# python -m pip install codespell tomli
# # or
# pip install codespell tomli
# ```
codespell_ignore_file=dev/tools/codespell/codespell-lines-ignore.txt
if [ -z "${0##*.sh}" ] ; then
# Suppose running from inside script
# Get real path
script=$(realpath "$(test -L "$0" && readlink "$0" || echo "$0")")
PROJECT_ROOT=$(realpath "${script}")
while [ "${PROJECT_ROOT}" != "/" ] ; do
[ -r "${PROJECT_ROOT}/${codespell_ignore_file}" ] && break
PROJECT_ROOT=$(dirname "${PROJECT_ROOT}")
done
if [ "${PROJECT_ROOT}" == "/" ] ; then
echo "Project root not found from '${script}'"
exit 1
fi
codespell_ignore_file=${PROJECT_ROOT}/${codespell_ignore_file}
fi
# Make sure we are at the root of the project
[ -r "${codespell_ignore_file}" ] || { echo "${codespell_ignore_file} not found" ; exit 1 ; }
# Then:
# - Run codespell;
# - Identify files that have fixes;
# - Limit to files under git control;
# - Run codespell on selected files;
# - For each line, create a grep command to find the lines;
# - Execute that command by evaluation
codespell . \
| sed -n -E 's@^([^:]+):.*@\1@p' \
| xargs -r git ls-files -- \
| xargs -r codespell -- \
| sed -n -E 's@^([^:]+):[[:digit:]]+:[[:space:]](\S+)[[:space:]].*@grep -P '\''\\b\2\\b'\'' -- "\1" >> '"${codespell_ignore_file}"'@p' \
| while read -r line ; do eval "$line" ; done
# Finally, sort and remove duplicates to make merges easier.
sort -u -o "${codespell_ignore_file}"{,}

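For readers who find the shell pipeline dense, the same idea is sketched below in Python. It is an illustrative approximation only (it must be run from the project root and, unlike the script above, it skips the `git ls-files` filtering), not a replacement for `addCodespellIgnores.sh`.

```python
# Hedged sketch: collect the source lines that codespell currently flags and
# merge them, de-duplicated and sorted, into the lines-ignore file.
import re
import subprocess

IGNORE_FILE = "dev/tools/codespell/codespell-lines-ignore.txt"

report = subprocess.run(
    ["codespell", "."], capture_output=True, text=True, check=False
)

entries = set()
for line in report.stdout.splitlines():
    # codespell reports findings as "path:line: word ==> correction"
    found = re.match(r"^([^:]+):\d+:\s+(\S+)\s", line)
    if not found:
        continue
    path, word = found.group(1), found.group(2)
    try:
        with open(path, encoding="utf-8", errors="surrogateescape") as src:
            for src_line in src:
                if re.search(rf"\b{re.escape(word)}\b", src_line):
                    entries.add(src_line.rstrip("\n"))
    except OSError:
        continue

# Keep what is already ignored, then rewrite the file sorted and unique,
# like `sort -u -o` in the shell version.
with open(IGNORE_FILE, encoding="utf-8") as fh:
    entries.update(line.rstrip("\n") for line in fh if line.strip())
with open(IGNORE_FILE, "w", encoding="utf-8") as fh:
    fh.write("\n".join(sorted(entries)) + "\n")
```
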
dev/tools/codespell/codespell-dict.txt (new file, 21 additions)

@@ -0,0 +1,21 @@
# Please add entries in alphabetical order (`sort -u`-like).
EPR->ERP
alpah->alpha
alphanothtml->alphanohtml
alpahnothtml->alphanohtml
aplha->alpha
aplhanothtml->alphanohtml
aploha->alpha
aplohanothtml->alphanohtml
aplphanothtml->alphanohtml
choosed->chosen
dolibar->dolibarr
dollibar->dolibarr
dollibarr->dolibarr
# fiche->card
mot de passe->password
not de passe->password
nothtml->nohtml
tableau de bord->state board
tagret->target
thridparty->thirdparty

dev/tools/codespell/codespell-ignore.txt (new file, 91 additions)

@@ -0,0 +1,91 @@
# List of words codespell will ignore
# one per line, case-sensitive (when not lowercase)
# PROVid
provid
# PostgreSQL
postgresql
alltime
ba
blacklist
whitelist
bu
captial
categorie
categories
crypted
clos
contaxt
courant
datea
datee
errorstring
exten
falsy
master
medias
noe
NOO
noo
od
nd
udate
periode
projet
referer
referers
scrit
ser
slave
savvy
# Inside email
suport
te
technic
thead
udo
ue
ro
ws
# Code string
ect
tempdate
# checkES
checkes
sav
files'
# Used as array key
seeked
# Used as translation key
developpers
# Used as var
pice
# Used as key
marge
# htdocs/projet/activity/permonth.php
tweek
# moral (var name)
mor
# reyear, remonth, reday
reday
# Strings used as keys for translation
uptodate
reenable
# Function - rename to devalidate ?
unvalidate
# Some french strings
somme
caracteres
cas
sur
Datas
datas
valide
raison
que
dur
fonction
espace
methode
# Proper names
tim

File diff suppressed because it is too large.

pyproject.toml (new file, 50 additions)

@@ -0,0 +1,50 @@
[build-system]
requires = ["setuptools>=61.2"]
build-backend = "setuptools.build_meta"
[tool.codespell]
# The configuration must be kept here to ensure that
# `codespell` can be run as a standalone program from the CLI
# with the appropriate default options.
skip = "*/langs/*,*/build/exe/*,**.log,*.pdf,*dev/resources/*,*.phar,*.z,*.gz,*.sql,*.svg,*htdocs/includes/*,*/textiso.txt,*.js,*README-*,*build/rpm/*spec,*build/pad/*ml,*htdocs/includes/phpoffice/*,*htdocs/includes/tecnickcom/*,*dev/initdemo/removeconfdemo.sh,*dev/tools/codespell/*,*pyproject.toml,*build/exe/*,*fontawe*,*htdocs/theme/*/flags-sprite.inc.php,*dev/setup/codetemplates/codetemplates.xml,*/php.ini,*/html_cerfafr.*,*/lessc.class.php,*.asciidoc,*.xml,*opensurvey/css/style.css"
quiet-level = 2
ignore-regex = '\\[fnrstv]'
builtin = "clear,rare,informal,usage,code,names"
ignore-words = "dev/tools/codespell/codespell-ignore.txt"
exclude-file = "dev/tools/codespell/codespell-lines-ignore.txt"
uri-ignore-words-list="ned"
# For future reference: it is not currently possible to specify
# the standard dictionary and the custom dictionary in the configuration
# file.
# D = "-"
# dictionary = "dev/tools/codespell/codespell-dict.txt"
[tool.setuptools]
include-package-data = false
# pyproject.toml
[tool.yamlfix]
# allow_duplicate_keys = true
line_length = 80
# none_representation = "null"
# comments_min_spaces_from_content = 2
# comments_require_starting_space = true
# whitelines = 0
# comment_whitelines = 0
# section_whitelines = 0
# explicit_start = true
# sequence_style = keep_style # flow_style, block_style, keep_style
# indent_mapping = 2
# indent_offset = 2
# indent_sequence = 4
# none_representation = ""
# quote_basic_values = false
# YAMLFIX_quote_keys_and_basic_values = false
# quote_representation = false
# preserve_quotes = false
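As the comment in `[tool.codespell]` notes, the two `-D` dictionaries cannot be expressed in the configuration file. The sketch below (an assumption-laden illustration, to be run from the project root with `codespell` and `tomllib`/`tomli` available) shows how the file-based options and the CLI-only dictionary options combine:

```python
# Hedged sketch: read the [tool.codespell] options that codespell picks up from
# pyproject.toml, then invoke codespell with the dictionaries that can only be
# passed on the command line (as done in .pre-commit-config.yaml).
import subprocess

try:
    import tomllib  # Python 3.11+
except ImportError:
    import tomli as tomllib  # the pre-commit hook installs tomli for this

with open("pyproject.toml", "rb") as fh:
    codespell_cfg = tomllib.load(fh)["tool"]["codespell"]

print("Options codespell reads from pyproject.toml:", sorted(codespell_cfg))

subprocess.run(
    [
        "codespell",
        "-D", "-",  # the default built-in dictionary
        "-D", "dev/tools/codespell/codespell-dict.txt",  # the custom dictionary
        ".",
    ],
    check=False,
)
```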