From 96d81c572fd223df078bea92bed6a1793985a2cc Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Tue, 25 Mar 2025 18:24:38 +0100 Subject: [PATCH 01/15] Last commit of the day --- .github/actions/apt_requirements/action.yml | 3 ++ .../misc/compute_files_hash/action.yml | 12 ++++--- .../restore_virtualenv/action.yml | 2 +- .../save_virtualenv/action.yml | 2 +- .../python_linters/requirements-linters.txt | 3 ++ actions/apt_requirements/README.md | 31 +++++++++++++++++++ actions/apt_requirements/action.yml | 3 ++ actions/misc/compute_files_hash/README.md | 18 +++++++++++ actions/misc/compute_files_hash/action.yml | 12 ++++--- .../create_dev_requirements_file/README.md | 11 +++++++ .../create_docs_requirements_file/README.md | 12 +++++++ .../restore_virtualenv/action.yml | 2 +- .../save_virtualenv/action.yml | 2 +- .../python_linters/requirements-linters.txt | 3 ++ 14 files changed, 102 insertions(+), 14 deletions(-) create mode 100644 actions/apt_requirements/README.md create mode 100644 actions/misc/compute_files_hash/README.md create mode 100644 actions/python_requirements/create_dev_requirements_file/README.md create mode 100644 actions/python_requirements/create_docs_requirements_file/README.md diff --git a/.github/actions/apt_requirements/action.yml b/.github/actions/apt_requirements/action.yml index 7134837..70a3a98 100644 --- a/.github/actions/apt_requirements/action.yml +++ b/.github/actions/apt_requirements/action.yml @@ -5,6 +5,9 @@ inputs: description: Requirements file required: true +# TODO scomporre questa action in due: +# - save apt cache +# - restore apt cache runs: using: "composite" steps: diff --git a/.github/actions/misc/compute_files_hash/action.yml b/.github/actions/misc/compute_files_hash/action.yml index 2c3b905..fca2a53 100644 --- a/.github/actions/misc/compute_files_hash/action.yml +++ b/.github/actions/misc/compute_files_hash/action.yml @@ -2,8 +2,8 @@ name: Composite action compute files hash description: Composite action to compute a 
single hash of one or more files inputs: file_paths: - description: Comma separeted list of files. - required: false + description: Space separeted list of files for which a single SHA256 hash will be computed. + required: true outputs: computed_hash: @@ -16,11 +16,13 @@ runs: - name: Compute files SHA256 hash id: compute_files_sha256_hash run: | + if [[ -z '${{ inputs.file_paths }}' ]]; then + echo "::error::file_paths cannot be empty!" + exit 1 + fi JOINED_FILES="cat " # Create a bash array of file paths - IFS=',' read -r -a files <<< "${{ inputs.file_paths }}" - echo "::debug::File paths array is composed by: ${files[@]}" - for file in ${files[@]}; + for file in ${{ inputs.file_paths }}; do if [[ -f $file ]]; then # Concat file path to cat command diff --git a/.github/actions/python_requirements/restore_virtualenv/action.yml b/.github/actions/python_requirements/restore_virtualenv/action.yml index 1798a87..bfde805 100644 --- a/.github/actions/python_requirements/restore_virtualenv/action.yml +++ b/.github/actions/python_requirements/restore_virtualenv/action.yml @@ -32,7 +32,7 @@ runs: uses: actions/cache/restore@v4 with: path: ${{ inputs.virtual_environment_path }} - key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} + key: ${{ inputs.git_reference }}-venv-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} - name: Activate restored virtual environment if: > diff --git a/.github/actions/python_requirements/save_virtualenv/action.yml b/.github/actions/python_requirements/save_virtualenv/action.yml index b93d66c..151fae8 100644 --- a/.github/actions/python_requirements/save_virtualenv/action.yml +++ b/.github/actions/python_requirements/save_virtualenv/action.yml @@ -26,4 +26,4 @@ runs: uses: actions/cache/save@v4 with: path: ${{ inputs.virtual_environment_path }} - key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} \ No newline at 
end of file + key: ${{ inputs.git_reference }}-venv-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/.github/configurations/python_linters/requirements-linters.txt b/.github/configurations/python_linters/requirements-linters.txt index 1f6b203..d8e8a4c 100644 --- a/.github/configurations/python_linters/requirements-linters.txt +++ b/.github/configurations/python_linters/requirements-linters.txt @@ -1,6 +1,9 @@ autoflake~=2.3.1 bandit~=1.8.3 black~=25.1.0 +# use fork since main repo is not updated +# see https://github.com/rocioar/flake8-django/pull/134 +# Note: python 3.12 is not supported flake8-django @ git+https://github.com/terencehonles/flake8-django.git@a6e369e89d275dfd5514f2aa9d091aa36c5ff84b flake8~=7.1.2 isort~=6.0.1 diff --git a/actions/apt_requirements/README.md b/actions/apt_requirements/README.md new file mode 100644 index 0000000..841f871 --- /dev/null +++ b/actions/apt_requirements/README.md @@ -0,0 +1,31 @@ +# Composite action install APT requirements + +This action install possible APT requirements defined in a standalone file. +To speed up this process, and hence to avoid re-downloading the requirements each time even though nothing has changed GitHub cache has been exploited. + +First, a SHA256 hash of the APT requirements file is produced. +The aforementioned hash is then used to (respectively) query the caching system on GitHub for: + +* The git reference of the Pull Request (e.g. `941/merge`). +* The target branch reference (e.g. `develop`). + +If requirements `.deb` files were previously cached, so in case of a **cache hit**, they will be restored to the correct APT cache directory (i.e. `/var/cache/apt/archives`). Doing this allow us to skip: + +* Refreshing the repositories package lists (i.e. `apt update` command) +* Downloading the required packages and their dependencies. 
+ +After querying the GitHub cache, `apt install` command is run with the space separated list of APT requirements to install all dependencies. + +Finally, if both cache queries resulted in a miss, APT requirements are cached using `-`. + +NOTE: + +As you can see, here two restore + +TODO finire documentazione + +## Documentation + +### Inputs + +* `requirements_file` - Mandatory - Path to the APT requirements file. This file must contain a list of packages to install, one per line. diff --git a/actions/apt_requirements/action.yml b/actions/apt_requirements/action.yml index 7134837..70a3a98 100644 --- a/actions/apt_requirements/action.yml +++ b/actions/apt_requirements/action.yml @@ -5,6 +5,9 @@ inputs: description: Requirements file required: true +# TODO scomporre questa action in due: +# - save apt cache +# - restore apt cache runs: using: "composite" steps: diff --git a/actions/misc/compute_files_hash/README.md b/actions/misc/compute_files_hash/README.md new file mode 100644 index 0000000..f1c594f --- /dev/null +++ b/actions/misc/compute_files_hash/README.md @@ -0,0 +1,18 @@ +# Composite action compute files hash + +This action computes a single SHA256 hash of one or more files. +Given a **space separated list of file paths**, a new file is created by concatenating all those files together. Then the SHA256 hash of the newly created file is computed and returned as the output. + +Before being joined together, each file is tested to ensure that it **exists** and that it is **a regular file**. + +This action is useful when saving/restoring a cache in which a unique key is required. As a matter of fact, the hash is used as a part of the hash key. + +## Documentation + +### Inputs + +* `file_paths` - Mandatory - Space separated list of file paths for which a single SHA256 hash will be computed. + +### Outputs + +* `computed_hash` - A SHA256 hash of the file obtained by joining (concatenating) all input files together. 
diff --git a/actions/misc/compute_files_hash/action.yml b/actions/misc/compute_files_hash/action.yml index 2c3b905..fca2a53 100644 --- a/actions/misc/compute_files_hash/action.yml +++ b/actions/misc/compute_files_hash/action.yml @@ -2,8 +2,8 @@ name: Composite action compute files hash description: Composite action to compute a single hash of one or more files inputs: file_paths: - description: Comma separeted list of files. - required: false + description: Space separeted list of files for which a single SHA256 hash will be computed. + required: true outputs: computed_hash: @@ -16,11 +16,13 @@ runs: - name: Compute files SHA256 hash id: compute_files_sha256_hash run: | + if [[ -z '${{ inputs.file_paths }}' ]]; then + echo "::error::file_paths cannot be empty!" + exit 1 + fi JOINED_FILES="cat " # Create a bash array of file paths - IFS=',' read -r -a files <<< "${{ inputs.file_paths }}" - echo "::debug::File paths array is composed by: ${files[@]}" - for file in ${files[@]}; + for file in ${{ inputs.file_paths }}; do if [[ -f $file ]]; then # Concat file path to cat command diff --git a/actions/python_requirements/create_dev_requirements_file/README.md b/actions/python_requirements/create_dev_requirements_file/README.md new file mode 100644 index 0000000..6749dad --- /dev/null +++ b/actions/python_requirements/create_dev_requirements_file/README.md @@ -0,0 +1,11 @@ +# Composite action create Python dev requirements file + +This is action creates the `requirements-dev.txt` file. This is a Python requirements file that will contain all **development dependencies**. + +## Documentation + +### Inputs + +* `install_from` - Optional - The path used as working directory when creating the `requirements-dev.txt` file. It defaults to the current directory (i.e. `.`). +* `project_dev_requirements_file` - Optional - The path of a project `requirements-dev.txt`. This was designed in case development requirements other than coverage are required. 
If specified, the dependencies in the project `requirements-dev.txt` will be appended in the newly created `requirements-dev.txt`. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **no custom `requirements-dev.txt`**. +* `use_coverage` - Optional - Whether to use coverage or not. diff --git a/actions/python_requirements/create_docs_requirements_file/README.md b/actions/python_requirements/create_docs_requirements_file/README.md new file mode 100644 index 0000000..34bae77 --- /dev/null +++ b/actions/python_requirements/create_docs_requirements_file/README.md @@ -0,0 +1,12 @@ +# Composite action create Python docs requirements file + +This is action creates the `requirements-docs.txt` file. This is a Python requirements file that will contain all **dependencies required to build the documentation**. + +## Documentation + +### Inputs + +* `install_from` - Optional - The path used as working directory when creating the `requirements-docs.txt` file. It defaults to the current directory (i.e. `.`). +* `project_docs_requirements_file` - Optional - The path of a project `requirements-docs.txt`. This was designed in case requirements to build documentation other than rstcheck, sphinx, sphinx_rtd_theme, sphinxcontrib-spelling and sphinxcontrib-django2 are required. If specified, the dependencies in the project `requirements-docs.txt` will be appended in the newly created `requirements-docs.txt`. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **no custom `requirements-docs.txt`**. +* `django_settings_module`: - Optional - Path to the Django settings file. It's used to make GitHub action aware of Django presence. In this case, `sphinxcontrib-django2` is also added to the newly created requirement file. 
**Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **no Django settings file**. +* `check_docs_directory` - Optional - Path that will be used by rstcheck to check documentation. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **documentation won't be checked**. diff --git a/actions/python_requirements/restore_virtualenv/action.yml b/actions/python_requirements/restore_virtualenv/action.yml index 1798a87..bfde805 100644 --- a/actions/python_requirements/restore_virtualenv/action.yml +++ b/actions/python_requirements/restore_virtualenv/action.yml @@ -32,7 +32,7 @@ runs: uses: actions/cache/restore@v4 with: path: ${{ inputs.virtual_environment_path }} - key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} + key: ${{ inputs.git_reference }}-venv-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} - name: Activate restored virtual environment if: > diff --git a/actions/python_requirements/save_virtualenv/action.yml b/actions/python_requirements/save_virtualenv/action.yml index b93d66c..151fae8 100644 --- a/actions/python_requirements/save_virtualenv/action.yml +++ b/actions/python_requirements/save_virtualenv/action.yml @@ -26,4 +26,4 @@ runs: uses: actions/cache/save@v4 with: path: ${{ inputs.virtual_environment_path }} - key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} \ No newline at end of file + key: ${{ inputs.git_reference }}-venv-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/configurations/python_linters/requirements-linters.txt b/configurations/python_linters/requirements-linters.txt index 1f6b203..d8e8a4c 100644 --- a/configurations/python_linters/requirements-linters.txt +++ 
b/configurations/python_linters/requirements-linters.txt @@ -1,6 +1,9 @@ autoflake~=2.3.1 bandit~=1.8.3 black~=25.1.0 +# use fork since main repo is not updated +# see https://github.com/rocioar/flake8-django/pull/134 +# Note: python 3.12 is not supported flake8-django @ git+https://github.com/terencehonles/flake8-django.git@a6e369e89d275dfd5514f2aa9d091aa36c5ff84b flake8~=7.1.2 isort~=6.0.1 From 4bc4fa96900b64b7f24ee44df831f62919c45bdd Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Wed, 26 Mar 2025 18:31:21 +0100 Subject: [PATCH 02/15] Last commit of the day 26 --- .../create_virtualenv/action.yml | 2 + .../restore_pip_cache/action.yml | 5 +- .../restore_virtualenv/action.yml | 4 +- .../save_pip_cache/action.yml | 7 ++- .../save_virtualenv/action.yml | 6 +- .github/workflows/_python.yml | 4 +- .../restore_apt_cache/README.md | 29 +++++++++ .../restore_apt_cache/action.yml | 59 +++++++++++++++++++ .../apt_requirements/save_apt_cache/README.md | 22 +++++++ .../save_apt_cache/action.yml | 24 ++++++++ .../create_dev_requirements_file/README.md | 10 ++-- .../create_docs_requirements_file/README.md | 10 ++-- .../create_linter_requirements_file/README.md | 27 +++++++++ .../create_virtualenv/README.md | 20 +++++++ .../create_virtualenv/action.yml | 2 + .../restore_pip_cache/README.md | 41 +++++++++++++ .../restore_pip_cache/action.yml | 5 +- .../restore_virtualenv/README.md | 30 ++++++++++ .../restore_virtualenv/action.yml | 4 +- .../save_pip_cache/README.md | 22 +++++++ .../save_pip_cache/action.yml | 7 ++- .../save_virtualenv/README.md | 23 ++++++++ .../save_virtualenv/action.yml | 6 +- workflows/_python.yml | 4 +- 24 files changed, 340 insertions(+), 33 deletions(-) create mode 100644 actions/apt_requirements/restore_apt_cache/README.md create mode 100644 actions/apt_requirements/restore_apt_cache/action.yml create mode 100644 actions/apt_requirements/save_apt_cache/README.md create mode 100644 actions/apt_requirements/save_apt_cache/action.yml create mode 100644 
actions/python_requirements/create_linter_requirements_file/README.md create mode 100644 actions/python_requirements/create_virtualenv/README.md create mode 100644 actions/python_requirements/restore_pip_cache/README.md create mode 100644 actions/python_requirements/restore_virtualenv/README.md create mode 100644 actions/python_requirements/save_pip_cache/README.md create mode 100644 actions/python_requirements/save_virtualenv/README.md diff --git a/.github/actions/python_requirements/create_virtualenv/action.yml b/.github/actions/python_requirements/create_virtualenv/action.yml index 00a40a4..02dcb93 100644 --- a/.github/actions/python_requirements/create_virtualenv/action.yml +++ b/.github/actions/python_requirements/create_virtualenv/action.yml @@ -22,5 +22,7 @@ runs: run: | source ${{ inputs.virtualenv_path }}/bin/activate echo "VIRTUAL_ENV=$VIRTUAL_ENV" >> $GITHUB_ENV + echo "::debug::Virtual environment path is $VIRTUAL_ENV" echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH + echo "::debug::PATH environment variable state after $VIRTUAL_ENV/bin path being added to it: $GITHUB_PATH" shell: bash diff --git a/.github/actions/python_requirements/restore_pip_cache/action.yml b/.github/actions/python_requirements/restore_pip_cache/action.yml index 0c4f93b..e4568f7 100644 --- a/.github/actions/python_requirements/restore_pip_cache/action.yml +++ b/.github/actions/python_requirements/restore_pip_cache/action.yml @@ -1,5 +1,5 @@ -name: Composite action restore pip's cache -description: Composite action to restore pip's cache +name: Composite action restore pip cache +description: Composite action to restore pip cache inputs: custom_pip_cache_path: description: Path to pip cache. 
@@ -32,6 +32,7 @@ runs: else echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT fi + echo "::debug::Pip cache path $pip_cache_path" shell: bash - name: Restore pip cache id: restore_pip_cache diff --git a/.github/actions/python_requirements/restore_virtualenv/action.yml b/.github/actions/python_requirements/restore_virtualenv/action.yml index bfde805..cd76c98 100644 --- a/.github/actions/python_requirements/restore_virtualenv/action.yml +++ b/.github/actions/python_requirements/restore_virtualenv/action.yml @@ -6,8 +6,8 @@ inputs: required: false default: ".venv" requirements_paths: - description: Comma separeted list of requirements files. They will be used to compute the hash for the cache key. - required: false + description: Space separeted list of requirement files. They will be used to compute the hash for the cache key. + required: true git_reference: description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. required: false diff --git a/.github/actions/python_requirements/save_pip_cache/action.yml b/.github/actions/python_requirements/save_pip_cache/action.yml index 165bcf4..d98e398 100644 --- a/.github/actions/python_requirements/save_pip_cache/action.yml +++ b/.github/actions/python_requirements/save_pip_cache/action.yml @@ -1,8 +1,8 @@ -name: Composite action save pip's cache -description: Composite action to save pip's cache +name: Composite action save pip cache +description: Composite action to save pip cache inputs: custom_pip_cache_path: - description: Path to pip cache. + description: Path to the pip cache. required: false git_reference: description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
@@ -27,6 +27,7 @@ runs: else echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT fi + echo "::debug::The pip cache path is $pip_cache_path" shell: bash - name: Save pip cache uses: actions/cache/save@v4 diff --git a/.github/actions/python_requirements/save_virtualenv/action.yml b/.github/actions/python_requirements/save_virtualenv/action.yml index 151fae8..6c6c66c 100644 --- a/.github/actions/python_requirements/save_virtualenv/action.yml +++ b/.github/actions/python_requirements/save_virtualenv/action.yml @@ -2,12 +2,12 @@ name: Composite action save Python virtual environment description: Composite action to save Python virtual environment inputs: virtual_environment_path: - description: Path to virtual environment. + description: Path to the virtual environment. required: false default: ".venv" requirements_paths: - description: Comma separeted list of requirements files. They will be used to compute the hash for the cache key. - required: false + description: Space separeted list of requirements files. They will be used to compute the hash for the cache key. + required: true git_reference: description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
required: false diff --git a/.github/workflows/_python.yml b/.github/workflows/_python.yml index 1f6624f..ae67a0d 100644 --- a/.github/workflows/_python.yml +++ b/.github/workflows/_python.yml @@ -319,7 +319,7 @@ jobs: id: restore_python_virtual_environment_pr uses: ./.github/actions/python_requirements/restore_virtualenv/ with: - requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" + requirements_paths: "${{ inputs.requirements_path }} requirements-linters.txt requirements-dev.txt requirements-docs.txt" - name: Restore Python virtual environment related to target branch id: restore_python_virtual_environment_target_branch @@ -375,7 +375,7 @@ jobs: steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' uses: ./.github/actions/python_requirements/save_virtualenv with: - requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" + requirements_paths: "${{ inputs.requirements_path }} requirements-linters.txt requirements-dev.txt requirements-docs.txt" - name: Save pip cache related to the PR event if: > diff --git a/actions/apt_requirements/restore_apt_cache/README.md b/actions/apt_requirements/restore_apt_cache/README.md new file mode 100644 index 0000000..046b58e --- /dev/null +++ b/actions/apt_requirements/restore_apt_cache/README.md @@ -0,0 +1,29 @@ +# Composite action restore APT cache + +This action restores an APT cache from GitHub's cache. + +Combined with [**save_apt_cache**](../save_apt_cache/README.md), it helps save time by avoiding the download of APT requirements. + +The action is composed of five steps: + +1. **Compute APT requirements files SHA256 hash** - This step uses [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) action to compute a single SHA256 hash of the APT requirements file described by the *apt_rquirements_file_path* input variable. 
The computed SHA256 hash will be part of the cache key. +2. **Backup `/var/cache/apt/archives` permissions** - This step backs up the permissions associated to the `/var/cache/apt/archives` directory. So, after restoring the APT cache they can be restored to the original ones. +3. **Add write permissions for all to `/var/cache/apt/archives`** - This step sets the write permission to the `/var/cache/apt/archives`. This is crucial because the [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) GitHub's action needs to be able to write to it. Without setting the correct write permission, a permission error is raised. +4. **Restore APT cache** - This step restores the APT cache. It uses the GitHub's [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action with the following parameters: + * **path** - A list of files, directories, or paths to restore - set to `/var/cache/apt/archives/*.deb`. + * **key** - An explicit key for a cache entry - set to the combination of three strings: + * *git_reference*, provided as an input to the action. + * A static part, `-apt-` + * The previously computed SHA256 hash of the APT requirements file. +5. **Restore original permissions to `/var/cache/apt/archives` and delete backup** - This step restores the original permissions to the `/var/cache/apt/archives` directory. Finally, the backup file is deleted. + +## Documentation + +### Inputs + +* **apt_requirements_file_path** - Required - Path to the APT requirements file. It will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. 
+ +### Outputs + +* **cache-hit** - A boolean value which is true when APT cache is found in the GitHub's cache, false otherwise. diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml new file mode 100644 index 0000000..6ddaa7a --- /dev/null +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -0,0 +1,60 @@ +name: Composite action restore APT cache +description: Composite action to restore APT cache +inputs: + apt_requirements_file_path: + description: Path to the APT requirements file + required: true + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. + required: false + default: ${{ github.ref_name }} + +outputs: + cache-hit: + description: Whether the APT cache was found in the GitHub's cache or not. + value: ${{ steps.restore_apt_cache.outputs.cache-hit }} + + +runs: + using: "composite" + steps: + - name: Compute APT requirements file SHA256 hash + id: compute_apt_requirements_file_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.apt_requirements_file_path }} + + - name: Backup /var/cache/apt/archives permissions + id: backup_apt_cache_dir_permissions + run: | + echo "apt_cache_dir_permissions_file=/tmp/apt_cache_dir_permissions.facl" >> $GITHUB_OUTPUT + sudo getfacl /var/cache/apt/archives > /tmp/apt_cache_dir_permissions.facl + echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + echo "::debug::Created /var/cache/apt/archives permissions backup to /tmp/apt_cache_dir_permissions.facl" + shell: bash + + # Vital to be able to restore cache + # If write permission is not set, a permissions error will be raised + - name: Add write permission for all to /var/cache/apt/archives + run: | + sudo chmod a+w /var/cache/apt/archives + echo "::debug::New permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + shell: bash + 
+ - name: Restore APT cache + uses: actions/cache/restore@v4 + id: restore_apt_cache + with: + path: /var/cache/apt/archives/*.deb + key: ${{ inputs.git_reference }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} + + - name: Restore original permissions to /var/cache/apt/archives and delete backup + run: | + permissions_file=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} + sudo setfacl --restore="$permissions_file" + echo "::debug::Restored original permissions to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + if [[ -f "$permissions_file" ]]; then + sudo rm "$permissions_file" + echo "::debug::Correctly removed $permissions_file permissions backup file" + fi + shell: bash \ No newline at end of file diff --git a/actions/apt_requirements/save_apt_cache/README.md b/actions/apt_requirements/save_apt_cache/README.md new file mode 100644 index 0000000..4d8dca8 --- /dev/null +++ b/actions/apt_requirements/save_apt_cache/README.md @@ -0,0 +1,22 @@ +# Composite action save APT cache + +This action saves the APT cache, almost always located at `/var/cache/apt/archives/*.deb` to the GitHub's cache. + +Combined with [**restore_apt_cache**](../restore_apt_cache/README.md), it helps save time by avoiding the download of APT requirements. + +The action is composed of two steps: + +1. **Compute APT requirements file SHA256 hash** - This step uses the [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) action to compute the SHA256 hash of the APT requirements file that will be part of the cache key. +2. **Save APT cache** - This step does the real caching on GitHub. The GitHub's [**cache/save**](https://github.com/actions/cache/blob/main/save/README.md) is used with the following parameters: + 1. **path** - A list of files, directories, or paths to cache - set to `/var/cache/apt/archives/*.deb` to save all `*.deb` files in APT cache. + 2. 
**key** - An explicit key for a cache entry - set to the combination of three strings: + 1. *git_reference*, provided as an input to the action. + 2. A static part, `-apt-` + 3. The previously computed SHA256 hash of the APT requirements file. + +## Documentation + +### Inputs + +* **apt_requirements_file_path** - Required - Path to the APT requirements file. It will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. diff --git a/actions/apt_requirements/save_apt_cache/action.yml b/actions/apt_requirements/save_apt_cache/action.yml new file mode 100644 index 0000000..af41cfd --- /dev/null +++ b/actions/apt_requirements/save_apt_cache/action.yml @@ -0,0 +1,24 @@ +name: Composite action save APT cache +description: Composite action to save APT cache +inputs: + apt_requirements_file_path: + description: Path to the APT requirements file + required: true + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
+ required: false + default: ${{ github.ref_name }} + +runs: + using: "composite" + steps: + - name: Compute APT requirements file SHA256 hash + id: compute_apt_requirements_file_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.apt_requirements_file_path }} + - name: Save APT cache + uses: actions/cache/save@v4 + with: + path: /var/cache/apt/archives/*.deb + key: ${{ inputs.git_reference }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/actions/python_requirements/create_dev_requirements_file/README.md b/actions/python_requirements/create_dev_requirements_file/README.md index 6749dad..ae32be0 100644 --- a/actions/python_requirements/create_dev_requirements_file/README.md +++ b/actions/python_requirements/create_dev_requirements_file/README.md @@ -1,11 +1,13 @@ # Composite action create Python dev requirements file -This is action creates the `requirements-dev.txt` file. This is a Python requirements file that will contain all **development dependencies**. +This action creates the `requirements-dev.txt` file which will contain all **development dependencies**. + +As of today, the only development dependency supported is `coverage`. ## Documentation ### Inputs -* `install_from` - Optional - The path used as working directory when creating the `requirements-dev.txt` file. It defaults to the current directory (i.e. `.`). -* `project_dev_requirements_file` - Optional - The path of a project `requirements-dev.txt`. This was designed in case development requirements other than coverage are required. If specified, the dependencies in the project `requirements-dev.txt` will be appended in the newly created `requirements-dev.txt`. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **no custom `requirements-dev.txt`**. -* `use_coverage` - Optional - Whether to use coverage or not. 
+* **install_from** - Optional - The path used as working directory when creating the `requirements-dev.txt` file. It defaults to the current directory (i.e. `.`). +* **project_dev_requirements_file** - Optional - The path of a project `requirements-dev.txt`. This was designed in case development requirements other than coverage are required. If specified, the dependencies in the project `requirements-dev.txt` will be appended in the newly created `requirements-dev.txt`. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no custom `requirements-dev.txt`**. +* **use_coverage** - Optional - Whether to use coverage or not. It defaults to false. diff --git a/actions/python_requirements/create_docs_requirements_file/README.md b/actions/python_requirements/create_docs_requirements_file/README.md index 34bae77..913192f 100644 --- a/actions/python_requirements/create_docs_requirements_file/README.md +++ b/actions/python_requirements/create_docs_requirements_file/README.md @@ -1,12 +1,12 @@ # Composite action create Python docs requirements file -This is action creates the `requirements-docs.txt` file. This is a Python requirements file that will contain all **dependencies required to build the documentation**. +This action creates the `requirements-docs.txt` file. This is a Python requirements file that will contain all **dependencies required to build the documentation**. ## Documentation ### Inputs -* `install_from` - Optional - The path used as working directory when creating the `requirements-docs.txt` file. It defaults to the current directory (i.e. `.`). -* `project_docs_requirements_file` - Optional - The path of a project `requirements-docs.txt`. This was designed in case requirements to build documentation other than rstcheck, sphinx, sphinx_rtd_theme, sphinxcontrib-spelling and sphinxcontrib-django2 are required. 
If specified, the dependencies in the project `requirements-docs.txt` will be appended in the newly created `requirements-docs.txt`. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **no custom `requirements-docs.txt`**. -* `django_settings_module`: - Optional - Path to the Django settings file. It's used to make GitHub action aware of Django presence. In this case, `sphinxcontrib-django2` is also added to the newly created requirement file. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **no Django settings file**. -* `check_docs_directory` - Optional - Path that will be used by rstcheck to check documentation. **Be careful: if a relative path is used this will depend on `install_from`.** Defaults to empty strings, and hence **documentation won't be checked**. +* **install_from** - Optional - The path used as working directory when creating the `requirements-docs.txt` file. It defaults to the current directory (i.e. `.`). +* **project_docs_requirements_file** - Optional - The path of a project `requirements-docs.txt`. This was designed in case requirements to build documentation other than rstcheck, sphinx, sphinx_rtd_theme, sphinxcontrib-spelling and sphinxcontrib-django2 are required. If specified, the dependencies in the project `requirements-docs.txt` will be appended in the newly created `requirements-docs.txt`. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no custom `requirements-docs.txt`**. +* **django_settings_module** - Optional - Path to the Django settings file. It's used to make GitHub action aware of Django presence. In this case, `sphinxcontrib-django2` is also added to the newly created requirement file. 
**Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no Django settings file**. +* **check_docs_directory** - Optional - Path that will be used by rstcheck to check documentation. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **documentation won't be checked**. diff --git a/actions/python_requirements/create_linter_requirements_file/README.md b/actions/python_requirements/create_linter_requirements_file/README.md new file mode 100644 index 0000000..9be5aa9 --- /dev/null +++ b/actions/python_requirements/create_linter_requirements_file/README.md @@ -0,0 +1,27 @@ +# Composite action create Python linter requirements file + +This action creates the `requirements-linters.txt` file which will contain all **linter dependencies** required by the CI. +The user can then choose which linters will be run, and hence written to the `requirements-linters.txt`, by the CI by setting some flags to true like *use_black*. + +As of today only the following linters are supported: + +* `autoflake` +* `bandit` +* `black` +* `flake8` +* `flake8-django` +* `isort` +* `pylint` +* `pylint-django` + +## Documentation + +### Inputs + +* **install_from** - Optional - The path used as working directory when creating the `requirements-linters.txt` file. It defaults to the current directory (i.e. `.`). +* `project_linter_requirements_file` - Optional - The path of a project `requirements-linters.txt`. This was designed in case requirements for linters other than `autoflake`, `bandit`, `black`, `flake8`, `flake8-django`, `isort`, `pylint` and `pylint-django` are required. If specified, the dependencies in the project `requirements-linters.txt` will be appended in the newly created `requirements-linters.txt`. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no custom `requirements-linters.txt`**. 
+* **django_settings_module** - Optional - Path to the Django settings file. It's used to make GitHub action aware of Django presence. In the case of a Django project, `flake8-django` and `pylint-django`, may be used and hence they will be added to the newly created requirements file. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no Django settings file**. +* **use_autoflake** - Optional - Flag to state whether to use or not `autoflake` linter. It defaults to false. +* **use_bandit** - Optional - Flag to state whether to use or not `bandit` linter. It defaults to false. +* **use_flake8** - Optional - Flag to state whether to use or not `flake8` linter. It defaults to false. +* **use_pylint** - Optional - Flag to state whether to use or not `pylint` linter. It defaults to false. diff --git a/actions/python_requirements/create_virtualenv/README.md b/actions/python_requirements/create_virtualenv/README.md new file mode 100644 index 0000000..8f3361a --- /dev/null +++ b/actions/python_requirements/create_virtualenv/README.md @@ -0,0 +1,20 @@ +# Composite action create Python virtual environment + +This GitHub action creates a Python virtual environment using Python's `venv` module. + +When the *activate_only* flag set is to true, the virtual environment at *virtualenv_path* will only be activated—**no creation will take place**. + +NOTE: + +To activate a Python virtual environment, the `activate` script is often used. +However, in a GitHub Action environment, this is not enough because environment variables are "lost" at the end of the Action. For this we need to do two things: + +1. Append the `VIRTUAL_ENV` environment variable to the `GITHUB_ENV` environment file. 
The [`GITHUB_ENV`](https://docs.github.com/en/enterprise-cloud@latest/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#setting-an-environment-variable) files makes environment variables available to any subsequent steps in a workflow job. Finally, it's important to note that `VIRTUAL_ENV` variable is created by the `activate` script and contains the path to the virtual environment. +2. Prepend the virtual environment's `bin` path to the system PATH. To allow also any subsequent steps in a workflow to be able to use it, [`GITHUB_PATH`](https://docs.github.com/en/enterprise-cloud@latest/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#adding-a-system-path) is employed. + +## Documentation + +### Inputs + +* **virtualenv_path** - Optional - The path where the virtual environment will be created. It defaults to `.venv`. +* **activate_only** - Optional - Flag that states whether to only activate the virtual environment. If false, a new virtual environment will be created before being activated. It defaults to false. 
\ No newline at end of file diff --git a/actions/python_requirements/create_virtualenv/action.yml b/actions/python_requirements/create_virtualenv/action.yml index 00a40a4..02dcb93 100644 --- a/actions/python_requirements/create_virtualenv/action.yml +++ b/actions/python_requirements/create_virtualenv/action.yml @@ -22,5 +22,7 @@ runs: run: | source ${{ inputs.virtualenv_path }}/bin/activate echo "VIRTUAL_ENV=$VIRTUAL_ENV" >> $GITHUB_ENV + echo "::debug::Virtual environment path is $VIRTUAL_ENV" echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH + echo "::debug::PATH environment variable state after $VIRTUAL_ENV/bin path being added to it: $GITHUB_PATH" shell: bash diff --git a/actions/python_requirements/restore_pip_cache/README.md b/actions/python_requirements/restore_pip_cache/README.md new file mode 100644 index 0000000..92a2a2f --- /dev/null +++ b/actions/python_requirements/restore_pip_cache/README.md @@ -0,0 +1,41 @@ +# Composite action restore pip cache + +This action restores the pip download cache from GitHub's cache. + +The action is composed of four steps: + +1. **Generate random UUID** - This step computes a random UUID, using the shell command `uuidgen`, which will be part of the cache key. Since pip cache will always be restored when a virtual environment is not found on GitHub's cache, a random UUID is required to generate a cache miss. +2. **Get pip cache directory** - This step retrieves the path to the pip cache. If *custom_pip_cache_path* is not an empty string, it will be used as pip cache path. Otherwise, the pip cache will be computed using `pip cache dir`. +3. **Restore pip cache** - This step performs the heavy lifting of the restoring. Using GitHub's [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action, the cache is restored using a **partial match**. This is performed by setting the following [inputs](https://github.com/actions/cache/tree/main/restore#inputs): + 1. 
**key** - an explicit key for a cache entry - will be set to a random UUID which will always trigger a cache miss. + 2. **path** - a list of files, directories, paths to restore - will be set to the pip download cache path. + 3. **restore-keys** - an ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key - will be set to `-pip-cache-` to restore the most recent pip cache for the chosen git reference. +4. **Explain cache output** - This step analyze the results of the [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action and sets *real_cache_hit* environment variable to true if there was a match, false otherwise. This is necessary because, in the case of a **partial match**, the *cache-hit*, output of [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md), will be false. Instead, we use the `cache-matched-key`, another output of [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md), which contains a reference for both **partial** and full matches, but will be empty in the case of a cache miss. + +NOTE: + +This action, despite seeming a bit unusual, is correct because GitHub does not allow cache updates or overwrites. + +Let's think about a real-world scenario: + +A user updates the requirements file. + +In this case our query to GitHub's cache for the previously cached virtual environment will **always** miss. This happens because changing the requirements file results in a new SHA256 hash, so the cache key changes. + +Thus, we aim to restore the pip cache to at least *mitigate* the impact of the changes in the requirements. Specifically, we want to save time by avoiding the download of packages that did not change. + +Next, we try to query the GitHub's cache for the previously cached pip cache. However, there are a few issues: + +1. We cannot use the SHA256 of the requirements file because it has changed, leading to cache misses. 
+2. We cannot create a cache key without a random component because, as said earlier, GitHub does not allow overwriting or updating of a cache item. For example, a cache key like `develop-pip-cache-` would generate an error when attempting to save a new cache if one already exists with the same name. + +## Documentation + +### Inputs + +* **custom_pip_cache_path** - Optional - Path to the pip cache. It can be used for setting a custom pip cache path. It defaults to an empty string. In this case, the pip cache path will be computed using `pip cache dir`. More information regarding the previous command is available [here](https://pip.pypa.io/en/stable/cli/pip_cache/#description) +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. + +### Outputs + +* **cache-hit** - A boolean value which states whether pip cache was found on GitHub's cache or not. diff --git a/actions/python_requirements/restore_pip_cache/action.yml b/actions/python_requirements/restore_pip_cache/action.yml index 0c4f93b..e4568f7 100644 --- a/actions/python_requirements/restore_pip_cache/action.yml +++ b/actions/python_requirements/restore_pip_cache/action.yml @@ -1,5 +1,5 @@ -name: Composite action restore pip's cache -description: Composite action to restore pip's cache +name: Composite action restore pip cache +description: Composite action to restore pip cache inputs: custom_pip_cache_path: description: Path to pip cache. 
@@ -32,6 +32,7 @@ runs: else echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT fi + echo "::debug::Pip cache path $pip_cache_path" shell: bash - name: Restore pip cache id: restore_pip_cache diff --git a/actions/python_requirements/restore_virtualenv/README.md b/actions/python_requirements/restore_virtualenv/README.md new file mode 100644 index 0000000..e40a3c1 --- /dev/null +++ b/actions/python_requirements/restore_virtualenv/README.md @@ -0,0 +1,30 @@ +# Composite action restore Python virtual environment + +This action restores a Python virtual environment from GitHub's cache. + +Combined with [**save_virtualenv**](../save_virtualenv/README.md), **it helps save time by avoiding the installation of Python requirements**. + +The action is composed of three steps: + +1. **Compute requirements files SHA256 hash** - This step uses [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) action to compute a single SHA256 hash of the files described by the *requirements_paths*. The computed SHA256 hash will be part of the cache key. +2. **Restore virtual environment** - This step does the heavy lifting of restoring the virtual environment from GitHub's cache. It uses the GitHub's [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action with the following parameters: + * **path** - A list of files, directories, or paths to restore - set to the virtual environment path input variable *virtual_environment_path*. + * **key** - An explicit key for a cache entry - set to the combination of three strings: + * *git_reference*, provided as an input to the action. + * A static part, `-venv-` + * The previously computed SHA256 hash of the requirements files. +3. **Activate restored virtual environment** - If the Python virtual environment was found in the GitHub's cache, it needs to be activated. 
This is performed using [**python_requirements/create_virtualenv**](../create_virtualenv/README.md) action with the following parameters: + * **virtualenv_path** - set to the Python virtual environment path. + * **activate_only** - set to true because it doesn't need to be created. + +## Documentation + +### Inputs + +* **virtual_environment_path** - Optional - Path where the virtual environment is located. It may be used to provide a custom path for the virtual environment. It defaults to `.venv`. +* **requirements_paths** - Required - A space separated list of requirements file paths. They will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. + +### Outputs + +* **cache-hit** - A boolean value which is true when virtual environment is found in the GitHub's cache, false otherwise. diff --git a/actions/python_requirements/restore_virtualenv/action.yml b/actions/python_requirements/restore_virtualenv/action.yml index bfde805..cd76c98 100644 --- a/actions/python_requirements/restore_virtualenv/action.yml +++ b/actions/python_requirements/restore_virtualenv/action.yml @@ -6,8 +6,8 @@ inputs: required: false default: ".venv" requirements_paths: - description: Comma separeted list of requirements files. They will be used to compute the hash for the cache key. - required: false + description: Space separated list of requirement files. They will be used to compute the hash for the cache key. + required: true git_reference: description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
required: false diff --git a/actions/python_requirements/save_pip_cache/README.md b/actions/python_requirements/save_pip_cache/README.md new file mode 100644 index 0000000..e3950a0 --- /dev/null +++ b/actions/python_requirements/save_pip_cache/README.md @@ -0,0 +1,22 @@ +# Composite action save pip cache + +This action saves the pip download cache. + +Every time a user runs `pip install <package>`, pip downloads the package and all its dependencies. The packages are saved in a directory which, by default, is located at `~/.cache/pip`. +Saving this cache in GitHub's cache allows us to save time when installing those packages. As a matter of fact, before installing packages, pip's cache can be restored using [**restore_pip_cache**](../restore_pip_cache/README.md) action. + +The action is composed of three steps: + +1. **Generate random UUID** - This step computes a random UUID, using shell command `uuidgen`, which will be part of the cache key. The uniqueness of the UUID ensures that there will be no collisions between cache keys, which is crucial because **GitHub won't allow the creation of two caches with the same key** (cache update/overwrite **is not supported**). +2. **Get pip cache directory** - This step retrieves the path to the pip cache. If *custom_pip_cache_path* is not an empty string, it will be used as pip cache path. Otherwise, the pip cache will be computed using `pip cache dir`. +3. **Save pip cache** - This step performs the heavy lifting of the caching. Using GitHub's [**cache/save**](https://github.com/actions/cache/blob/main/save/README.md) action, the cache is saved with a key composed of: + 1. The git reference input, *git_reference* + 2. A static part, `pip-cache` + 3. The previously computed UUID + +## Documentation + +### Inputs + +* **custom_pip_cache_path** - Optional - Path to the pip cache. It can be used for setting a custom pip cache path. It defaults to an empty string. In this case, the pip cache path will be computed using `pip cache dir`. 
More information regarding the previous command is available [here](https://pip.pypa.io/en/stable/cli/pip_cache/#description) +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. diff --git a/actions/python_requirements/save_pip_cache/action.yml b/actions/python_requirements/save_pip_cache/action.yml index 165bcf4..d98e398 100644 --- a/actions/python_requirements/save_pip_cache/action.yml +++ b/actions/python_requirements/save_pip_cache/action.yml @@ -1,8 +1,8 @@ -name: Composite action save pip's cache -description: Composite action to save pip's cache +name: Composite action save pip cache +description: Composite action to save pip cache inputs: custom_pip_cache_path: - description: Path to pip cache. + description: Path to the pip cache. required: false git_reference: description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. @@ -27,6 +27,7 @@ runs: else echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT fi + echo "::debug::The pip cache path is $pip_cache_path" shell: bash - name: Save pip cache uses: actions/cache/save@v4 diff --git a/actions/python_requirements/save_virtualenv/README.md b/actions/python_requirements/save_virtualenv/README.md new file mode 100644 index 0000000..19d9ab5 --- /dev/null +++ b/actions/python_requirements/save_virtualenv/README.md @@ -0,0 +1,23 @@ +# Composite action save Python virtual environment + +This action saves a Python virtual environment to GitHub's cache. + +Combined with [**restore_virtualenv**](../restore_virtualenv/README.md), **it helps save time by avoiding the installation of Python requirements**. + +The action is composed of two steps: + +1. 
**Compute requirements files SHA256 hash** - This step uses [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) to compute a single SHA256 hash of the files described by the *requirements_paths*. The computed SHA256 hash will be part of the cache key. +2. **Cache virtual environment** - This step does the heavy lifting of saving the virtual environment to GitHub's cache. It uses the GitHub's [**cache/save**](https://github.com/actions/cache/blob/main/save/README.md) action with the following parameters: + 1. **path** - A list of files, directories, or paths to cache - set to the virtual environment path input variable *virtual_environment_path*. + 2. **key** - An explicit key for a cache entry - set to the combination of three strings: + 1. *git_reference*, provided as an input to the action. + 2. A static part, `-venv-` + 3. The previously computed SHA256 hash of the requirements files. + +## Documentation + +### Inputs + +* **virtual_environment_path** - Optional - Path where the virtual environment is located. It may be used to provide a custom path for the virtual environment. It defaults to `.venv`. +* **requirements_paths** - Required - A space separated list of requirements file paths. They will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. 
diff --git a/actions/python_requirements/save_virtualenv/action.yml b/actions/python_requirements/save_virtualenv/action.yml index 151fae8..6c6c66c 100644 --- a/actions/python_requirements/save_virtualenv/action.yml +++ b/actions/python_requirements/save_virtualenv/action.yml @@ -2,12 +2,12 @@ name: Composite action save Python virtual environment description: Composite action to save Python virtual environment inputs: virtual_environment_path: - description: Path to virtual environment. + description: Path to the virtual environment. required: false default: ".venv" requirements_paths: - description: Comma separeted list of requirements files. They will be used to compute the hash for the cache key. - required: false + description: Space separated list of requirements files. They will be used to compute the hash for the cache key. + required: true git_reference: description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. required: false diff --git a/workflows/_python.yml b/workflows/_python.yml index 1f6624f..ae67a0d 100644 --- a/workflows/_python.yml +++ b/workflows/_python.yml @@ -319,7 +319,7 @@ jobs: id: restore_python_virtual_environment_pr uses: ./.github/actions/python_requirements/restore_virtualenv/ with: - requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" + requirements_paths: "${{ inputs.requirements_path }} requirements-linters.txt requirements-dev.txt requirements-docs.txt" - name: Restore Python virtual environment related to target branch id: restore_python_virtual_environment_target_branch @@ -375,7 +375,7 @@ steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' uses: ./.github/actions/python_requirements/save_virtualenv with: - requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" + requirements_paths: "${{ inputs.requirements_path }} 
requirements-linters.txt requirements-dev.txt requirements-docs.txt" - name: Save pip cache related to the PR event if: > From c212320d230a66b2ca37db0c0d6a0ed03596cf30 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Thu, 27 Mar 2025 18:42:43 +0100 Subject: [PATCH 03/15] last commit of the day --- .github/workflows/_detect_changes.yml | 8 +- .github/workflows/_node.yml | 3 +- .github/workflows/_python.yml | 43 +++++- .github/workflows/create_apt_cache.yaml | 12 +- .github/workflows/create_python_cache.yaml | 2 +- CHANGELOG.md | 1 + actions/apt_requirements/README.md | 31 ---- actions/apt_requirements/action.yml | 60 -------- workflows/README.md | 156 +++++++++++++++++++++ workflows/_detect_changes.yml | 8 +- workflows/_node.yml | 3 +- workflows/_python.yml | 43 +++++- workflows/create_apt_cache.yaml | 12 +- workflows/create_python_cache.yaml | 2 +- 14 files changed, 259 insertions(+), 125 deletions(-) delete mode 100644 actions/apt_requirements/README.md delete mode 100644 actions/apt_requirements/action.yml create mode 100644 workflows/README.md diff --git a/.github/workflows/_detect_changes.yml b/.github/workflows/_detect_changes.yml index eb68be3..9a74105 100644 --- a/.github/workflows/_detect_changes.yml +++ b/.github/workflows/_detect_changes.yml @@ -45,13 +45,16 @@ jobs: backend: ${{steps.diff_check_backend.outputs.backend}} frontend: ${{steps.diff_check_frontend.outputs.frontend}} steps: - - uses: actions/checkout@v4 + - name: Check out PR target branch + uses: actions/checkout@v4 with: ref: ${{ github.base_ref }} - - uses: actions/checkout@v4 + - name: Check out source branch latest commit + uses: actions/checkout@v4 with: clean: false + - name: Generate summary if: ${{inputs.backend_directories != ''}} | ${{inputs.frontend_directories != ''}} run: | @@ -76,7 +79,6 @@ jobs: echo "::debug::diff command results: $(git diff --compact-summary origin/${{ github.base_ref }} -- ${{ inputs.backend_directories }} $BACKEND_EXCLUSIONS | head -n -1 )" echo "backend 
$BACKEND_CHANGES" - - name: Generate diffs for frontend if: ${{inputs.frontend_directories != ''}} id: diff_check_frontend diff --git a/.github/workflows/_node.yml b/.github/workflows/_node.yml index 1a850b0..074bbe1 100644 --- a/.github/workflows/_node.yml +++ b/.github/workflows/_node.yml @@ -88,7 +88,8 @@ jobs: node_version: ${{ fromJson(inputs.node_versions) }} language: ['javascript'] steps: - - uses: actions/checkout@v4 + - name: Check out latest commit for current branch + uses: actions/checkout@v4 - name: Set up Node.js uses: actions/setup-node@v4 diff --git a/.github/workflows/_python.yml b/.github/workflows/_python.yml index ae67a0d..04ec4c5 100644 --- a/.github/workflows/_python.yml +++ b/.github/workflows/_python.yml @@ -245,7 +245,8 @@ jobs: language: ['python'] env: ${{ fromJson(inputs.env) }} steps: - - uses: actions/checkout@v4 + - name: Check out latest commit + uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v5 @@ -264,11 +265,39 @@ jobs: shell: bash - - name: Install apt requirements - if: inputs.packages_path - uses: ./.github/actions/apt_requirements + - name: Restore APT cache related to PR event + id: restore_apt_cache_pr + uses: ./.github/actions/apt_requirements/restore_apt_cache with: - requirements_file: ${{ inputs.packages_path }} + apt_requirements_file_path: ${{ inputs.packages_path }} + + - name: Restore APT cache related to target branch + id: restore_apt_cache_target_branch + if: steps.restore_apt_cache_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/apt_requirements/restore_apt_cache + with: + apt_requirements_file_path: ${{ inputs.packages_path }} + git_reference: ${{ github.base_ref }} + + - name: Refresh APT repositories + if: > + steps.restore_apt_cache_pr.outputs.cache-hit != 'true' && + steps.restore_apt_cache_target_branch.outputs.cache-hit != 'true' + run: | + sudo apt-get update + shell: bash + + - name: Install APT requirements + run: | + sudo apt-get install -y --no-install-recommends 
$(tr '\n' ' ' < ${{ inputs.packages_path }}) + shell: bash + + - name: Save APT cache related to PR event + if: > + steps.restore_apt_cache_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/apt_requirements/save_apt_cache + with: + apt_requirements_file_path: ${{ inputs.packages_path }} - name: Check requirements licenses if: inputs.check_requirements_licenses && steps.cache-virtualenv.outputs.cache-hit != 'true' @@ -370,14 +399,14 @@ shell: bash working-directory: ${{ inputs.install_from }} - - name: Save Python virtual environment related to the PR event + - name: Save Python virtual environment related to PR event if: > steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' uses: ./.github/actions/python_requirements/save_virtualenv with: requirements_paths: "${{ inputs.requirements_path }} requirements-linters.txt requirements-dev.txt requirements-docs.txt" - - name: Save pip cache related to the PR event + - name: Save pip cache related to PR event if: > steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && steps.restore_pip_cache_pr.outputs.cache-hit != 'true' diff --git a/.github/workflows/create_apt_cache.yaml b/.github/workflows/create_apt_cache.yaml index 06f26cc..4123d51 100644 --- a/.github/workflows/create_apt_cache.yaml +++ b/.github/workflows/create_apt_cache.yaml @@ -23,7 +23,8 @@ jobs: name: Create cache for APT dependencies runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Check out latest commit on current branch + uses: actions/checkout@v4 # Remember to set the same APT requirements file path set before! 
- name: Install APT dependencies @@ -31,13 +32,14 @@ jobs: sudo apt-get update sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < .github/test/python_test/packages.txt) - - name: Compute apt_packages.txt SHA256 hash - id: compute_apt_packages_sha256_hash + - name: Compute APT dependencies file SHA256 hash + id: compute_apt_dependencies_file_sha256_hash uses: ./.github/actions/misc/compute_files_hash with: file_paths: .github/test/python_test/packages.txt - - uses: actions/cache/save@v4 + - name: Save APT cache + uses: actions/cache/save@v4 with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_packages_sha256_hash.outputs.computed_hash }} + key: ${{ github.ref_name }}-${{ steps.compute_apt_dependencies_file_sha256_hash.outputs.computed_hash }} diff --git a/.github/workflows/create_python_cache.yaml b/.github/workflows/create_python_cache.yaml index a6b65b5..cac8510 100644 --- a/.github/workflows/create_python_cache.yaml +++ b/.github/workflows/create_python_cache.yaml @@ -32,7 +32,7 @@ jobs: with: python-version: "3.12" - - name: Set up Python's virtual environment + - name: Set up Python virtual environment uses: ./.github/actions/python_requirements/create_virtualenv - name: Install Python dependencies diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ff6f6e..d8363d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ * Added "release.yml" action to to push containers to AWS ECR * Added *create_apt_cache.yaml* workflow to cache APT requirements each time a commit is pushed on selected branch and **when the requirements file has changed**. +* Added documentation. ### Bugfix diff --git a/actions/apt_requirements/README.md b/actions/apt_requirements/README.md deleted file mode 100644 index 841f871..0000000 --- a/actions/apt_requirements/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# Composite action install APT requirements - -This action install possible APT requirements defined in a standalone file. 
-To speed up this process, and hence to avoid re-downloading the requirements each time even though nothing has changed GitHub cache has been exploited. - -First, a SHA256 hash of the APT requirements file is produced. -The aforementioned hash is then used to (respectively) query the caching system on GitHub for: - -* The git reference of the Pull Request (e.g. `941/merge`). -* The target branch reference (e.g. `develop`). - -If requirements `.deb` files were previously cached, so in case of a **cache hit**, they will be restored to the correct APT cache directory (i.e. `/var/cache/apt/archives`). Doing this allow us to skip: - -* Refreshing the repositories package lists (i.e. `apt update` command) -* Downloading the required packages and their dependencies. - -After querying the GitHub cache, `apt install` command is run with the space separated list of APT requirements to install all dependencies. - -Finally, if both cache queries resulted in a miss, APT requirements are cached using `-`. - -NOTE: - -As you can see, here two restore - -TODO finire documentazione - -## Documentation - -### Inputs - -* `requirements_file` - Mandatory - Path to the APT requirements file. This file must contain a list of packages to install, one per line. 
diff --git a/actions/apt_requirements/action.yml b/actions/apt_requirements/action.yml deleted file mode 100644 index 70a3a98..0000000 --- a/actions/apt_requirements/action.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Composite action install APT requirements -description: Composite action to install APT requirements -inputs: - requirements_file: - description: Requirements file - required: true - -# TODO scomporre questa action in due: -# - save apt cache -# - restore apt cache -runs: - using: "composite" - steps: - - name: Compute apt requirements file SHA256 hash - id: compute_apt_requirements_file_sha256_hash - uses: ./.github/actions/misc/compute_files_hash - with: - file_paths: ${{ inputs.requirements_file }} - - # Vital to be able to restore cache - # If write permission is not set, a permissions error will be raised - - name: Modification to /var/cache/apt/archives permissions - run: | - sudo chmod a+w /var/cache/apt/archives - shell: bash - - - uses: actions/cache/restore@v4 - id: restore_cache_from_parent_branch - with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - - - uses: actions/cache/restore@v4 - id: restore_cache_from_current_branch - if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' - with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - - - name: Refresh repositories - if: > - steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && - steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' - run: | - sudo apt-get update - shell: bash - - - name: Install requirements - run: | - sudo apt-get install -y --no-install-recommends $(tr '\n' ' ' < ${{ inputs.requirements_file }}) - shell: bash - - - uses: actions/cache/save@v4 - id: cache_apt_requirements_for_current_branch - if: > - 
steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && - steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' - with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/workflows/README.md b/workflows/README.md new file mode 100644 index 0000000..cebf147 --- /dev/null +++ b/workflows/README.md @@ -0,0 +1,156 @@ +# Workflows + +## [Reusable detect changes workflow](_detect_changes.yml) + +This sub workflow detects and enumerates the changes between two branches. + +The workflow is composed of five steps: + +1. **Check out PR target branch** - This step checks out the latest commit of the PR target branch for the current repository. This workflow was designed to detect changes when a PR to a target branch was created. Therefore, the latest commit of the target branch must be checked out as the first step. To achieve this, GitHub's [**checkout**](https://github.com/actions/checkout) action is used with the following parameters: + 1. **ref** - The branch, tag or SHA to checkout - It is set to `github.base_ref`, which corresponds to the **PR target branch**. +2. **Check out source branch latest commit** - This step checks out the latest commit of the source branch on top of the previous one. To do so, GitHub's [**checkout**](https://github.com/actions/checkout) action is used with the following parameters: + 1. **clean** - Whether to execute `git clean -ffdx && git reset --hard HEAD` before fetching - It is set to false, which means **do not delete untracked files**. +3. **Generate summary** - This step creates the title for the action summary. As a matter of fact, the detected changes will be reported below the title in the summary section. The step is performed only if one or both *backend_directories* and *frontend_directories* inputs are not empty. +4. 
**Generate diffs for backend** - This step detects and enumerates the files that changed between the two branches. This is performed using [`git diff`](https://git-scm.com/docs/git-diff) command. Specifically, the code instructs git to show the changes in the *backend_directories* relative to `origin/` (the target branch). During this process, the [**pathspec**](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) is used to exclude files or directories specified in the *backend_exclusions* input. The changes are then enumerated and output through the *backend* variable. +5. **Generate diffs for frontend** - This step follow the same pattern as the **Generate diffs for backend** step but for the frontend directories. + +### Documentation + +#### Inputs + +* **backend_directories** - Optional - Space separated list of backend directories to check for changes. By default, it is set to an empty string. +* **backend_exclusions** - Optional - Space separated list of backend files or directories to **exclude** when checking for changes. Globs are supported. By default, it is set to an empty string. +* **frontend_directories** - Optional - Space separated list of frontend directories to check for changes. By default, it is set to an empty string +* **frontend_exclusions** - Optional - Space separated list of frontend files or directories to **exclude** when checking for changes. Globs are supported. By default, it is set to an empty string. +* **ubuntu_version** - Optional - The Ubuntu version to run the workflow against. By default, it is set to `latest`. + +#### Outputs + +* **backend** - The number of backend files that have changed. +* **frontend** - The number of frontend files that have changed. + +## [Reusable node tests workflow](_node.yml) + +This sub workflow install node dependencies and run frontend linters and tests. + +The workflow is composed of nine steps: + +1. 
**Check out latest commit for current branch** - This step checks out the latest commit for the current branch of the repository. To do so, it uses GitHub's [**checkout**](https://github.com/actions/checkout) action with no parameters. +2. **Set up Node.js** - This step sets Node.js up downloading binaries and project's dependencies. This is done using the GitHub's [**setup-node**](https://github.com/actions/setup-node) action which also allows to cache and restore the project dependencies. It's used with the following parameters: + 1. **node-version** - Node.js version to use - It is set according to *node_version* input variable. + 2. **cache** - Which package manager used to install and cache packages - It is set to `npm`. + 3. **cache-dependency-path** - Path to the dependency file: `package-lock.json`, `yarn.lock` etc. It is set to `/package-lock.json`, where *working_directory* is the input variable. +3. **Add dependencies** - This step adds additional dependencies to the `package-lock.json` file. Specifically, these packages are added to the **devDependencies** part of the aforementioned file. Which packages will be added is chosen accordingly to input variables: + 1. *use_jest* + 2. *use_react* + 3. *use_eslint* + 4. *use_prettier* + 5. *use_stylelint* +4. **Install packages** - This step install all missing packages from the dependency file in the directory specified by the *working_directory* input variable. +5. **Run linters** - This step uses [**node_linter**](../actions/node_linter/action.yml) action to run linters against the frontend source code. +6. **Check packages licenses** - This step uses [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker) to check the licenses used by the project requirements. +7. **Run CodeQL** - This step uses [**codeql**](../actions/codeql/action.yml) action to run CodeQL to discover vulnerabilities across the codebase. +8. 
**Run custom command** - This step is performed only if the input variable *custom_command* is not empty. The step simply run the bash command described in the previously mentioned input variable in the working directory specified by the *working_directory* input variable. +9. **Run jest tests** - This step runs Jest tests if the input variable *use_jest* is set to true. Finally, if *use_coverage* and *upload_coverage* are set to true, a coverage report is generated and uploaded. + +### Documentation + +#### Inputs + +* **node_versions** - Required - An array of Node.js versions to use. +* **working_directory** - Required - Path to the `package.json` file +* **check_packages_licenses** - Optional - Whether to check npm packages licenses or not. By default it is set to true. +* **use_jest** - Optional - Whether to use Jest test suite or not. By default it is set to false. +* **use_react** - Optional - Whether react is used by the project or not. By default it is set to false. +* **use_eslint** - Optional - Whether to use ESlint linter or not. By default it is set to true +* **use_prettier** - Optional - Whether to use Prettier formatter or not. By default it is set to true. +* **use_stylelint** - Optional - Whether to use Stylelint linter or not. By default it is set to true. +* **use_coverage** - Optional - Whether to use Coverage or not. To work, it also require *use_jest* to be true. By default it is set to false. +* **upload_coverage** - Optional - Whether to upload coverage report to GitHub. By default it is set to false +* **run_codeql** - Optional - Whether to run CodeQL against the codebase. By default it is set to false. +* **custom_command** - Optional - A custom bash command to be run by the workflow. By default it is set to an empty string. +* **max_timeout** - Optional - A maximum amount of minutes allowed for the workflow to run. By default it is set to 30. +* **ubuntu_version** - Optional - The Ubuntu version to run the workflow against. 
By default it is set to `latest`. + +## [Reusable python linter workflow](_python.yml) + +This sub workflow runs Python linters and tests against the codebase. + +It is composed of one job: + +1. **python** - This job is composed of thirty-one steps: + 1. **Check out latest commit** - Checks out the latest commit on the current branch of the repository. + 2. **Set up Python** + 3. **Inject stuff to environment** + 4. **Restore APT cache related to PR event** + 5. **Restore APT cache related to target branch** + 6. **Restore APT repositories** + 7. **Install APT requirements** + 8. **Save APT cache related to PR event** + 9. **Check requirements licenses** + 10. **Print wrong licenses** + 11. **Create linter requirements file** + 12. **Create dev requirements file** + 13. **Create docs requirement file** + 14. **Restore Python virtual environment related to PR event** + 15. **Restore Python virtual environment related to target branch** + 16. **Create Python virtual environment** + 17. **Restore pip cache related to PR event** + 18. **Restore pip cache related to target branch** + 19. **Install project requirements** + 20. **Install other requirements** + 21. **Save Python virtual environment related to PR event** + 22. **Save pip cache related to PR event** + 23. **Run linters** + 24. **Run CodeQL** + 25. **Build Docs** + 26. **Start services** + 27. **Start celery worker** + 28. **Run custom command** + 29. **Check migrations** + 30. **Run unittest** + 31. **Create coverage** + +## [Create APT cache](create_apt_cache.yaml) + +This workflow is run in the event of **a push on branches *main*, *master*, *develop*, *dev***. Specifically, it is triggered only when the APT requirements file is updated. + +The workflow is composed of a single job: + +1. **Create cache for APT dependencies** - This job, as described by its name, creates a cache for APT dependencies and stores it on GitHub. It is composed of four steps: + 1. 
**Check out latest commit on current branch** - This step checks out the latest commit on the current branch of the repository. + 2. **Install APT dependencies** - This step refreshes APT repositories and then installs the project dependencies. This action is required to produce the APT cache that will be saved later. + 3. **Compute APT dependencies file SHA256 hash** - This step computes the SHA256 of the APT dependency file that will be used as cache key. + 4. **Save APT cache** - This step saves APT cache on GitHub. The GitHub's [**cache/save**](https://github.com/actions/cache/tree/main/save) action is used. + +## [Create Python cache](create_python_cache.yaml) + +This workflow is run in the event of **a push on branches *main*, *master*, *develop*, *dev***. Specifically, it is triggered only when the Python requirements file is updated. + +The workflow is composed of a single job: + +1. **Create cache for Python dependencies** - This job, as described by its name, creates a cache for Python dependencies and stores it on GitHub. It is composed of six steps: + 1. **Check out latest commit** - This step checks out the latest commit on the current branch for the repository. + 2. **Set up Python** - This step installs Python on the runner. + 3. **Set up Python virtual environment** - This step uses [**create_virtualenv**](../actions/python_requirements/create_virtualenv/README.md) action to create a Python virtual environment. + 4. **Install Python dependencies** - This step installs Python requirements to produce the final virtual environment that will be cached. Installing the Python dependencies also creates the pip cache. + 5. **Save pip cache** - This step uses [**save_pip_cache**](../actions/python_requirements/save_pip_cache/README.md) action to save pip's download cache on GitHub. + 6. 
**Create virtual environment cache** - This step uses [**save_virtualenv**](../actions/python_requirements/save_virtualenv/README.md) action to save virtual environment on GitHub's cache. + +## [CI](pull_request_automation.yml) + +This workflow runs in the case of a **pull request on branches *master*, *main*, *develop*, *dev*** and it's the core CI workflow. + +It is composed of three jobs: + +1. **detect-changes** - This job detects and enumerates changes to backend and/or frontend files. To do so, it uses the [**_detect_changes**](_detect_changes.yml) workflow. +2. **node** - If any changes to the frontend files are found, [**_node**](_node.yml) workflow is run. +3. **python** - If any changes to the backend files are found, [**_python**](_python.yml) workflow is run. + +## [Release and publish](release.yml) + +TODO + +## [Reusable release and tag workflow](_release_and_tag.yml) + +TODO \ No newline at end of file diff --git a/workflows/_detect_changes.yml b/workflows/_detect_changes.yml index eb68be3..9a74105 100644 --- a/workflows/_detect_changes.yml +++ b/workflows/_detect_changes.yml @@ -45,13 +45,16 @@ jobs: backend: ${{steps.diff_check_backend.outputs.backend}} frontend: ${{steps.diff_check_frontend.outputs.frontend}} steps: - - uses: actions/checkout@v4 + - name: Check out PR target branch + uses: actions/checkout@v4 with: ref: ${{ github.base_ref }} - - uses: actions/checkout@v4 + - name: Check out source branch latest commit + uses: actions/checkout@v4 with: clean: false + - name: Generate summary if: ${{inputs.backend_directories != ''}} | ${{inputs.frontend_directories != ''}} run: | @@ -76,7 +79,6 @@ jobs: echo "::debug::diff command results: $(git diff --compact-summary origin/${{ github.base_ref }} -- ${{ inputs.backend_directories }} $BACKEND_EXCLUSIONS | head -n -1 )" echo "backend $BACKEND_CHANGES" - - name: Generate diffs for frontend if: ${{inputs.frontend_directories != ''}} id: diff_check_frontend diff --git a/workflows/_node.yml 
b/workflows/_node.yml index 1a850b0..074bbe1 100644 --- a/workflows/_node.yml +++ b/workflows/_node.yml @@ -88,7 +88,8 @@ jobs: node_version: ${{ fromJson(inputs.node_versions) }} language: ['javascript'] steps: - - uses: actions/checkout@v4 + - name: Check out latest commit for current branch + uses: actions/checkout@v4 - name: Set up Node.js uses: actions/setup-node@v4 diff --git a/workflows/_python.yml b/workflows/_python.yml index ae67a0d..04ec4c5 100644 --- a/workflows/_python.yml +++ b/workflows/_python.yml @@ -245,7 +245,8 @@ jobs: language: ['python'] env: ${{ fromJson(inputs.env) }} steps: - - uses: actions/checkout@v4 + - name: Check out latest commit + uses: actions/checkout@v4 @@ -264,11 +265,39 @@ jobs: shell: bash - - name: Install apt requirements - if: inputs.packages_path - uses: ./.github/actions/apt_requirements + - name: Restore APT cache related to PR event + id: restore_apt_cache_pr + uses: ./.github/actions/apt_requirements/restore_apt_cache with: - requirements_file: ${{ inputs.packages_path }} + apt_requirements_file_path: ${{ inputs.packages_path }} + + - name: Restore APT cache related to target branch + id: restore_apt_cache_target_branch + if: steps.restore_apt_cache_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/apt_requirements/restore_apt_cache + with: + apt_requirements_file_path: ${{ inputs.packages_path }} + git_reference: ${{ github.base_ref }} + + - name: Refresh APT repositories + if: > + steps.restore_apt_cache_pr.outputs.cache-hit != 'true' && + steps.restore_apt_cache_target_branch.outputs.cache-hit != 'true' + run: | + sudo apt-get update + shell: bash + + - name: Install APT requirements + run: | + sudo apt-get install -y --no-install-recommends $(tr '\n' ' ' < ${{ inputs.packages_path }}) + shell: bash + + - name: Save APT cache related to PR event + if: > + steps.restore_apt_cache_pr.outputs.cache-hit != 'true' + uses: 
./.github/actions/apt_requirements/save_apt_cache + with: + apt_requirements_file_path: ${{ inputs.packages_path }} - name: Check requirements licenses if: inputs.check_requirements_licenses && steps.cache-virtualenv.outputs.cache-hit != 'true' @@ -370,14 +399,14 @@ jobs: shell: bash working-directory: ${{ inputs.install_from }} - - name: Save Python virtual environment related to the PR event + - name: Save Python virtual environment related to PR event if: > steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' uses: ./.github/actions/python_requirements/save_virtualenv with: requirements_paths: "${{ inputs.requirements_path }} requirements-linters.txt requirements-dev.txt requirements-docs.txt" - - name: Save pip cache related to the PR event + - name: Save pip cache related to PR event if: > steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && steps.restore_pip_cache_pr.outputs.cache-hit != 'true' diff --git a/workflows/create_apt_cache.yaml b/workflows/create_apt_cache.yaml index 06f26cc..4123d51 100644 --- a/workflows/create_apt_cache.yaml +++ b/workflows/create_apt_cache.yaml @@ -23,7 +23,8 @@ jobs: name: Create cache for APT dependencies runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Check out latest commit on current branch + uses: actions/checkout@v4 # Remember to set the same APT requirements file path set before! 
- name: Install APT dependencies @@ -31,13 +32,14 @@ jobs: sudo apt-get update sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < .github/test/python_test/packages.txt) - - name: Compute apt_packages.txt SHA256 hash - id: compute_apt_packages_sha256_hash + - name: Compute APT dependencies file SHA256 hash + id: compute_apt_dependencies_file_sha256_hash uses: ./.github/actions/misc/compute_files_hash with: file_paths: .github/test/python_test/packages.txt - - uses: actions/cache/save@v4 + - name: Save APT cache + uses: actions/cache/save@v4 with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_packages_sha256_hash.outputs.computed_hash }} + key: ${{ github.ref_name }}-${{ steps.compute_apt_dependencies_file_sha256_hash.outputs.computed_hash }} diff --git a/workflows/create_python_cache.yaml b/workflows/create_python_cache.yaml index a6b65b5..cac8510 100644 --- a/workflows/create_python_cache.yaml +++ b/workflows/create_python_cache.yaml @@ -32,7 +32,7 @@ jobs: with: python-version: "3.12" - - name: Set up Python's virtual environment + - name: Set up Python virtual environment uses: ./.github/actions/python_requirements/create_virtualenv - name: Install Python dependencies From 6359b5a6d84cdbfa1ac24442ebf5bb0f8696ad1e Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 12:49:42 +0100 Subject: [PATCH 04/15] added docs --- .github/workflows/_python.yml | 46 ++++++------ workflows/README.md | 127 +++++++++++++++++++++++++--------- workflows/_python.yml | 46 ++++++------ 3 files changed, 141 insertions(+), 78 deletions(-) diff --git a/.github/workflows/_python.yml b/.github/workflows/_python.yml index 04ec4c5..9af0e2c 100644 --- a/.github/workflows/_python.yml +++ b/.github/workflows/_python.yml @@ -299,28 +299,6 @@ jobs: with: apt_requirements_file_path: ${{ inputs.packages_path }} - - name: Check requirements licenses - if: inputs.check_requirements_licenses && 
steps.cache-virtualenv.outputs.cache-hit != 'true' - id: license_check_report - continue-on-error: true - uses: pilosus/action-pip-license-checker@v2 - with: - requirements: ${{ inputs.install_from }}/${{ inputs.requirements_path }} - exclude: ${{ inputs.ignore_requirements_licenses_regex }} - headers: true - fail: 'StrongCopyleft,NetworkCopyleft,Error' - fails-only: true - - - name: Print wrong licenses - if: steps.license_check_report.outcome == 'failure' - run: | - echo "License check failed" - echo "====================" - echo "${{ steps.license_check_report.outputs.report }}" - echo "====================" - exit 1 - shell: bash - - name: Create linter requirements file uses: ./.github/actions/python_requirements/create_linter_requirements_file with: @@ -399,6 +377,30 @@ jobs: shell: bash working-directory: ${{ inputs.install_from }} + - name: Check requirements licenses + if: > + inputs.check_requirements_licenses && + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' + id: license_check_report + continue-on-error: true + uses: pilosus/action-pip-license-checker@v2 + with: + requirements: ${{ inputs.install_from }}/${{ inputs.requirements_path }} + exclude: ${{ inputs.ignore_requirements_licenses_regex }} + headers: true + fail: 'StrongCopyleft,NetworkCopyleft,Error' + fails-only: true + + - name: Print wrong licenses + if: steps.license_check_report.outcome == 'failure' + run: | + echo "License check failed" + echo "====================" + echo "${{ steps.license_check_report.outputs.report }}" + echo "====================" + exit 1 + shell: bash + - name: Save Python virtual environment related to PR event if: > steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' diff --git a/workflows/README.md b/workflows/README.md index cebf147..618ce64 100644 --- a/workflows/README.md +++ b/workflows/README.md @@ -4,7 +4,7 @@ This sub workflow detects and enumerates the changes between two branches. 
-The workflow is composed of five steps: +It is composed of five steps: 1. **Check out PR target branch** - This step checks out the latest commit of the PR target branch for the current repository. This workflow was designed to detect changes when a PR to a target branch was created. Therefore, the latest commit of the target branch must be checked out as the first step. To achieve this, GitHub's [**checkout**](https://github.com/actions/checkout) action is used with the following parameters: 1. **ref** - The branch, tag or SHA to checkout - It is set to `github.base_ref`, which corresponds to the **PR target branch**. @@ -33,7 +33,7 @@ The workflow is composed of five steps: This sub workflow install node dependencies and run frontend linters and tests. -The workflow is composed of nine steps: +It is composed of nine steps: 1. **Check out latest commit for current branch** - This step checks out the latest commit for the current branch of the repository. To do so, it uses GitHub's [**checkout**](https://github.com/actions/checkout) action with no parameters. 2. **Set up Node.js** - This step sets Node.js up downloading binaries and project's dependencies. This is done using the GitHub's [**setup-node**](https://github.com/actions/setup-node) action which also allows to cache and restore the project dependencies. It's used with the following parameters: @@ -79,37 +79,96 @@ This sub workflow runs Python linters and tests against the codebase. It is composed of one job: 1. **python** - This job is composed of thirty-one steps: - 1. **Check out latest commit** - Checks out the latest commit on the current branch of the repository. - 2. **Set up Python** - 3. **Inject stuff to environment** - 4. **Restore APT cache related to PR event** - 5. **Restore APT cache related to target branch** - 6. **Restore APT repositories** - 7. **Install APT requirements** - 8. **Save APT cache related to PR event** - 9. **Check requirements licenses** - 10. 
**Print wrong licenses** - 11. **Create linter requirements file** - 12. **Create dev requirements file** - 13. **Create docs requirement file** - 14. **Restore Python virtual environment related to PR event** - 15. **Restore Python virtual environment related to target branch** - 16. **Create Python virtual environment** - 17. **Restore pip cache related to PR event** - 18. **Restore pip cache related to target branch** - 19. **Install project requirements** - 20. **Install other requirements** - 21. **Save Python virtual environment related to PR event** - 22. **Save pip cache related to PR event** - 23. **Run linters** - 24. **Run CodeQL** - 25. **Build Docs** - 26. **Start services** - 27. **Start celery worker** - 28. **Run custom command** - 29. **Check migrations** - 30. **Run unittest** - 31. **Create coverage** + 1. **Check out latest commit** - Checks out the latest commit on the current branch of the repository using the GitHub's [**checkout**](https://github.com/actions/checkout) action. + 2. **Set up Python** - Sets up Python on the runner machine using GitHub's [**setup-python**](https://github.com/actions/setup-python) action with the following parameter: + 1. **python-version** - Which Python version to use - It is set according to the *python_versions* input variable. + 3. **Inject stuff to environment** - This step adds a few environment variables to the system's environment. Specifically: + 1. If *django_settings_module* is set, **PYTHONPATH** and **DJANGO_SETTINGS_MODULE** will be added to the runner's environment. + 2. If *run_codeql* is true, **CODEQL_PYTHON** will be added to the runner's environment. + 4. **Restore APT cache related to PR event** - This step will try to restore the APT cache related to the PR event using [**restore_apt_cache**](../actions/apt_requirements/restore_apt_cache/README.md) with the following parameter: + 1. 
**apt_requirements_file_path** - Path to the APT requirements file - It is set to the *packages_path* input variable. + 5. **Restore APT cache related to target branch** - This step will try to restore the APT cache related to the target branch (of the PR) using [**restore_apt_cache**](../actions/apt_requirements/restore_apt_cache/README.md) only if **Restore APT cache related to PR event** produces a cache miss. It is run with the following parameters: + 1. **apt_requirements_file_path** - Path to the APT requirements file - It is set to the *packages_path* input variable. + 2. **git_reference** - A git reference (name of the branch, reference to the PR) that will be used to build the cache key - It is set to the target branch. + 6. **Refresh APT repositories** - If both PR event and target branch APT cache restore attempts resulted in a cache miss, the APT repositories list is refreshed using `sudo apt-get update`. + 7. **Install APT requirements** - This step installs APT requirements listed in the *packages_path* requirements file. **Since they are not required, recommended packages are not downloaded**. + 8. **Save APT cache related to PR event** - When the attempt to restore the APT cache related to the PR event results in a cache miss, the newly populated APT cache is saved to GitHub. This is performed using [**save_apt_cache**](../actions/apt_requirements/save_apt_cache/README.md) action with the following parameter: + 1. **apt_requirements_file_path** - Path to the APT requirements file - It is set to the *packages_path* input variable. + 9. **Create linter requirements file** - This step creates the linter requirements file using the [**create_linter_requirements_file**](../actions/python_requirements/create_linter_requirements_file/README.md) action. + 10. 
**Create dev requirements file** - This step creates the development requirements file using the [**create_dev_requirements_file**](../actions/python_requirements/create_dev_requirements_file/README.md) action. + 11. **Create docs requirement file** - This step creates the documentation requirements file using the [**create_docs_requirements_file**](../actions/python_requirements/create_docs_requirements_file/README.md) action. + 12. **Restore Python virtual environment related to PR event** - This step attempts to restore the Python virtual environment for the PR using the [**restore_python_virtualenv**](../actions/python_requirements/restore_virtualenv/README.md) action. + 13. **Restore Python virtual environment related to target branch** - If the attempt to restore the Python virtual environment for the PR, result in a cache miss, an attempt to restore the Python virtual environment for the target branch is made using the [**restore_python_virtualenv**](../actions/python_requirements/restore_virtualenv/README.md) action. + 14. **Create Python virtual environment** - If both attempts to restore the Python virtual environment for the PR, for the target branch, result in a cache miss, a Python virtual environment is created using the [**create_virtualenv**](../actions/python_requirements/create_virtualenv/README.md) action. + 15. **Restore pip cache related to PR event** - If both attempts to restore the Python virtual environment for the PR, for the target branch, result in a cache miss, an attempt to restore the pip cache for the PR event is made using the [**restore_pip_cache**](../actions/python_requirements/restore_pip_cache/README.md) action. + 16. 
**Restore pip cache related to target branch** - If both attempts to restore the Python virtual environment for the PR, for the target branch, as well as the pip cache for the PR, result in a cache miss, an attempt to restore the pip cache for the target branch is made using the [**restore_pip_cache**](../actions/python_requirements/restore_pip_cache/README.md) action. + 17. **Install project requirements** - If both attempts to restore the Python virtual environment for the PR event, and the target branch result in a cache miss, project requirements are installed from the working directory specified by the *install_from* input variable. + 18. **Install other requirements** - If the attempt to restore the Python virtual environment for the PR event result in a cache miss, developer, linters and documentation requirements are installed from the working directory specified by *working_directory* input variable. + 19. **Check requirements licenses** - If the input variable *check_requirements_licenses* is set to true and the attempt to restore the Python virtual environment related to the PR event result in a cache miss, this step performs the requirements licenses check using [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker). + 20. **Print wrong licenses** - If the output of **Check requirements licenses** is `failure`, the list of licenses for which the check failed will be returned. + 21. **Save Python virtual environment related to PR event** - If the attempt to restore the Python virtual environment resulted in a cache miss, the Python virtual environment is saved for the PR event using the [*save_virtualenv*](../actions/python_requirements/save_virtualenv/README.md) action with the following parameter: + 1. 
**requirements_paths** - A space separated list of requirements file paths - It is set to the combination of *requirements_path*, `requirements-linters.txt`, `requirements-dev.txt` and `requirements-docs.txt` joined by spaces. + 22. **Save pip cache related to PR event** - If both attempts to restore the Python virtual environment and the pip cache related to the PR resulted in a cache miss, the pip cache is saved for the PR event using the [*save_pip_cache*](../actions/python_requirements/save_pip_cache/README.md) action. + 23. **Run linters** - If one of the following input variables: *use_black*, *use_isort*, *use_flake8*, *use_pylint*, *use_bandit* and *use_autoflake* is true, this step executes the linters against the codebase in the working directory specified by the *working_directory* variable. + 24. **Run CodeQL** - If the *run_codeql* input variable is true, this step runs CodeQL against the codebase using the [**codeql**](../actions/codeql/action.yml) action in the working directory specified by the *working_directory* variable. + 25. **Build Docs** - If the *check_docs_directory* input variable is set, this step executes `rstcheck` to ensure that the documentation in *check_docs_directory* is valid. Finally, the documentation is built using `sphinx`. + 26. **Start services** - If one or more of the following input variables: *use_postgres*, *use_elastic_search*, *use_memcached*, *use_redis*, *use_rabbitmq* and *use_mongo* are true, this step creates the Docker container for the service using the [**services**](../actions/services/action.yml) action. Additional parameters, such as *postgres_db* or *elasticsearch_version* can also be provided to the aforementioned action. + 27. **Start celery worker** - If the *use_celery* input variable is true, a Celery worker is created for the *celery_app* application. The `celery` command is executed in the working directory specified by the *working_directory* input variable. + 28. 
**Run custom command** - If the *custom_command* input variable is not empty, the command defined by the variable is executed in the working directory specified by the *working_directory* input variable. + 29. **Check migrations** - If *check_migrations* is true and *django_settings_module* is not empty, this step will perform a dry run of `django-admin makemigrations` to ensure that the migrations are valid. + 30. **Run unittest** - This step runs Python tests against the codebase in the directory described by the *working_directory* input variable. Additionally, according to *tags_for_manual_tests* and *tags_for_slow_tests* variables, some tests will be excluded from the run. + 31. **Create coverage output** - If *use_coverage* and *upload_coverage* are set to true, this step produces a coverage report of the codebase and uploads it to GitHub. The *working_directory* input variable is used to determines the directory in which coverage should be run. + +### Documentation + +#### Inputs + +* **python_versions** - Required - Python versions used by this workflow in the form of a JSON array. +* **ubuntu_version** - Optional - Ubuntu version to run workflow against. By default, it is set to `latest`. +* **working_directory** - Required - Directory in which to run linters. +* **requirements_path** - Required - Path to the requirements file of the Python project. +* **install_from** - Optional - Directory where all installation commands will be run. By default, it is set to `.`. +* **packages_path** - Optional - Path to the APT requirements file of the Python project. By default, it is set to an empty string. +* **env** - Optional - A JSON object containing a set of environment variables to be added to the system's environment. By default, it is set to an empty JSON object `{}`. +* **max_timeout** - Optional - Maximum amount of time (in minutes) the workflow is allowed to run. By default, it is set to `30`. +* **use_black** - Optional - Whether to use black formatter. 
By default, it is set to `false`. +* **use_isort** - Optional - Whether to use isort formatter. By default, it is set to `false`. +* **use_autoflake** - Optional - Whether to use autoflake linter. By default, it is set to `false`. +* **use_bandit** - Optional - Whether to use bandit linter. By default, it is set to `false`. +* **use_flake8** - Optional - Whether to use flake8 linter. By default, it is set to `false`. +* **use_pylint** - Optional - Whether to use pylint linter. By default, it is set to `false`. +* **use_coverage** - Optional - Whether to use coverage. By default, it is set to `false`. +* **coverage_config_path** - Optional - Path to the coverage configuration file. By default, it is set to `.coveragerc`. +* **upload_coverage** - Optional - Whether to upload coverage report to GitHub. To work, it needs *use_coverage* to be true. By default, it is set to `false`. +* **run_codeql** - Optional - Whether to run CodeQL against codebase. By default, it is set to `false`. +* **use_celery** - Optional - Whether to create a Celery container. By default, it is set to `false`. +* **use_elastic_search** - Optional - Whether to create an Elasticsearch container. By default, it is set to `false`. +* **use_memcached** - Optional - Whether to create a Memcached container. By default, it is set to `false`. +* **use_mongo** - Optional - Whether to create a MongoDB container. By default, it is set to `false`. +* **use_postgres** - Optional - Whether to create a PostgresDB container. By default, it is set to `false`. +* **use_rabbitmq** - Optional - Whether to create a RabbitMQ container. By default, it is set to `false`. +* **use_redis** - Optional - Whether to create a Redis container. By default, it is set to `false`. +* **celery_app** - Optional - A Celery application name. Requires *use_celery* to be true. By default, it is set to an empty string. +* **celery_queues** - Optional - A comma separated list of Celery queues. Requires *use_celery* to be true. 
By default, it is set to `default`. +* **elasticsearch_version** - Optional - Elasticsearch's container version. By default, it is set to `latest`. +* **elasticsearch_port** - Optional - Elasticsearch's container exposed port. By default, it is set to `9200`. +* **memcached_version** - Optional - Mecached's container version. By default, it is set to `latest`. +* **mongo_version** - Optional - MongoDB's container version. By default, it is set to `latest`. +* **postgres_db** - Optional - PostgresDB database name. Requires *use_postgres* to be true. By default, it is set to `db`. +* **postgres_user** - Optional - PostgresDB user name. Requires *use_postgres* to be true. By default, it is set to `user`. +* **postgres_password** - Optional - PostgresDB password. Requires *use_postgres* to be true. By default, it is set to `password`. +* **postgres_version** - Optional - PostgresDB's container version. Requires *use_postgres* to be true. By default, it is set to `latest`. +* **rabbitmq_version** - Optional - RabbitMQ's container version. Requires *use_rabbitmq* to be true. By default, it is set to `latest`. +* **redis_version** - Optional - Redis' container version. Requires *use_redis* to be true. By default, it is set to `latest`. +* **django_settings_module** - Optional - Path to the Django settings file. By default, it is set to an empty string. +* **check_migrations** - Optional - Whether to check that the project's migrations are valid. Requires *django_settings_module* to be set. By default, it is set to `false`. +* **check_requirements_licenses** - Optional - Whether to check that the requirements license is valid. Requires *django_settings_module* to be set. By default, it is set to `true`. +* **ignore_requirements_licenses_regex** - Optional - A regex that describes which directories should be ignored when checking the validity of requirements licenses. By default, it is set to `uWSGI.*|lunardate.*|.*QuokkaClient.*|pyquokka.*`. 
+* **tags_for_slow_tests** - Optional - A space separated list of tags for tests that will only be run on the master/main branch. **Works only for Django projects**. By default, it is set to an `slow`. +* **tags_for_manual_tests** - Optional - A space separated list of tags for tests that will only be run **manually** (CI will ignore them). **Works only for Django projects**. By default, it is set to `manual`. +* **custom_command** - Optional - A custom bash command to run. By default, it is set to an empty string. +* **check_docs_directory** - Optional - Path to the documentation directory in which `rstcheck` will be run to check documentation files. By default, it is set to an empty string. +* **check_dockerfile** - Optional - Path to a Dockerfile to be checked. **Warning: if set it may significantly increase the action time**. By default, it is set to an empty string. ## [Create APT cache](create_apt_cache.yaml) @@ -153,4 +212,4 @@ TODO ## [Reusable release and tag workflow](_release_and_tag.yml) -TODO \ No newline at end of file +TODO diff --git a/workflows/_python.yml b/workflows/_python.yml index 04ec4c5..9af0e2c 100644 --- a/workflows/_python.yml +++ b/workflows/_python.yml @@ -299,28 +299,6 @@ jobs: with: apt_requirements_file_path: ${{ inputs.packages_path }} - - name: Check requirements licenses - if: inputs.check_requirements_licenses && steps.cache-virtualenv.outputs.cache-hit != 'true' - id: license_check_report - continue-on-error: true - uses: pilosus/action-pip-license-checker@v2 - with: - requirements: ${{ inputs.install_from }}/${{ inputs.requirements_path }} - exclude: ${{ inputs.ignore_requirements_licenses_regex }} - headers: true - fail: 'StrongCopyleft,NetworkCopyleft,Error' - fails-only: true - - - name: Print wrong licenses - if: steps.license_check_report.outcome == 'failure' - run: | - echo "License check failed" - echo "====================" - echo "${{ steps.license_check_report.outputs.report }}" - echo "====================" - exit 1 
- shell: bash - - name: Create linter requirements file uses: ./.github/actions/python_requirements/create_linter_requirements_file with: @@ -399,6 +377,30 @@ jobs: shell: bash working-directory: ${{ inputs.install_from }} + - name: Check requirements licenses + if: > + inputs.check_requirements_licenses && + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' + id: license_check_report + continue-on-error: true + uses: pilosus/action-pip-license-checker@v2 + with: + requirements: ${{ inputs.install_from }}/${{ inputs.requirements_path }} + exclude: ${{ inputs.ignore_requirements_licenses_regex }} + headers: true + fail: 'StrongCopyleft,NetworkCopyleft,Error' + fails-only: true + + - name: Print wrong licenses + if: steps.license_check_report.outcome == 'failure' + run: | + echo "License check failed" + echo "====================" + echo "${{ steps.license_check_report.outputs.report }}" + echo "====================" + exit 1 + shell: bash + - name: Save Python virtual environment related to PR event if: > steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' From 6705c840a90c01e284305df1fb327494089297c2 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 12:52:09 +0100 Subject: [PATCH 05/15] few fixes --- .github/workflows/_detect_changes.yml | 8 ++++---- workflows/_detect_changes.yml | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/_detect_changes.yml b/.github/workflows/_detect_changes.yml index 9a74105..9e9b8a6 100644 --- a/.github/workflows/_detect_changes.yml +++ b/.github/workflows/_detect_changes.yml @@ -3,22 +3,22 @@ on: workflow_call: inputs: backend_directories: - description: Backend directories separated by spaces + description: Space separated list of backend directories required: false type: string backend_exclusions: - description: Backend directories or files to be excluded separated by spaces + description: Space separated list of Backend directories 
or files to be excluded required: false type: string frontend_directories: - description: Frontend directories separated by spaces + description: Space separated list of frontend directories required: false type: string frontend_exclusions: - description: Frontend directories or files to be excluded separated by spaces + description: Space separated list of frontend directories or files to be excluded required: false type: string diff --git a/workflows/_detect_changes.yml b/workflows/_detect_changes.yml index 9a74105..9e9b8a6 100644 --- a/workflows/_detect_changes.yml +++ b/workflows/_detect_changes.yml @@ -3,22 +3,22 @@ on: workflow_call: inputs: backend_directories: - description: Backend directories separated by spaces + description: Space separated list of backend directories required: false type: string backend_exclusions: - description: Backend directories or files to be excluded separated by spaces + description: Space separated list of Backend directories or files to be excluded required: false type: string frontend_directories: - description: Frontend directories separated by spaces + description: Space separated list of frontend directories required: false type: string frontend_exclusions: - description: Frontend directories or files to be excluded separated by spaces + description: Space separated list of frontend directories or files to be excluded required: false type: string From 646be756996574fb6ee7e26595730734d629468e Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 12:54:00 +0100 Subject: [PATCH 06/15] fixed if condition --- .github/workflows/_python.yml | 2 +- workflows/_python.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/_python.yml b/.github/workflows/_python.yml index 9af0e2c..e2142b1 100644 --- a/.github/workflows/_python.yml +++ b/.github/workflows/_python.yml @@ -294,7 +294,7 @@ jobs: - name: Save APT cache related to PR event if: > - 
steps.restore_apt_cache_pr.outputs.cache-hit != 'true' && + steps.restore_apt_cache_pr.outputs.cache-hit != 'true' uses: ./.github/actions/apt_requirements/save_apt_cache with: apt_requirements_file_path: ${{ inputs.packages_path }} diff --git a/workflows/_python.yml b/workflows/_python.yml index 9af0e2c..e2142b1 100644 --- a/workflows/_python.yml +++ b/workflows/_python.yml @@ -294,7 +294,7 @@ jobs: - name: Save APT cache related to PR event if: > - steps.restore_apt_cache_pr.outputs.cache-hit != 'true' && + steps.restore_apt_cache_pr.outputs.cache-hit != 'true' uses: ./.github/actions/apt_requirements/save_apt_cache with: apt_requirements_file_path: ${{ inputs.packages_path }} From 6b63c0dea9efd42b2e2921b4b345860401cbd0da Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 12:58:15 +0100 Subject: [PATCH 07/15] fixed .github --- .../restore_apt_cache/README.md | 29 +++ .../restore_apt_cache/action.yml | 59 +++++ .../apt_requirements/save_apt_cache/README.md | 22 ++ .../save_apt_cache/action.yml | 24 ++ .../actions/misc/compute_files_hash/README.md | 18 ++ .../create_dev_requirements_file/README.md | 13 ++ .../create_docs_requirements_file/README.md | 12 + .../create_linter_requirements_file/README.md | 27 +++ .../create_virtualenv/README.md | 20 ++ .../restore_pip_cache/README.md | 41 ++++ .../restore_virtualenv/README.md | 30 +++ .../save_pip_cache/README.md | 22 ++ .../save_virtualenv/README.md | 23 ++ .github/workflows/README.md | 215 ++++++++++++++++++ 14 files changed, 555 insertions(+) create mode 100644 .github/actions/apt_requirements/restore_apt_cache/README.md create mode 100644 .github/actions/apt_requirements/restore_apt_cache/action.yml create mode 100644 .github/actions/apt_requirements/save_apt_cache/README.md create mode 100644 .github/actions/apt_requirements/save_apt_cache/action.yml create mode 100644 .github/actions/misc/compute_files_hash/README.md create mode 100644 
.github/actions/python_requirements/create_dev_requirements_file/README.md create mode 100644 .github/actions/python_requirements/create_docs_requirements_file/README.md create mode 100644 .github/actions/python_requirements/create_linter_requirements_file/README.md create mode 100644 .github/actions/python_requirements/create_virtualenv/README.md create mode 100644 .github/actions/python_requirements/restore_pip_cache/README.md create mode 100644 .github/actions/python_requirements/restore_virtualenv/README.md create mode 100644 .github/actions/python_requirements/save_pip_cache/README.md create mode 100644 .github/actions/python_requirements/save_virtualenv/README.md create mode 100644 .github/workflows/README.md diff --git a/.github/actions/apt_requirements/restore_apt_cache/README.md b/.github/actions/apt_requirements/restore_apt_cache/README.md new file mode 100644 index 0000000..046b58e --- /dev/null +++ b/.github/actions/apt_requirements/restore_apt_cache/README.md @@ -0,0 +1,29 @@ +# Composite action restore APT cache + +This action restores an APT cache from GitHub's cache. + +Combined with [**save_apt_cache**](../save_apt_cache/README.md), it helps save time by avoiding the download of APT requirements. + +The action is composed of five steps: + +1. **Compute APT requirements files SHA256 hash** - This step uses [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) action to compute a single SHA256 hash of the APT requirements file described by the *apt_requirements_file_path* input variable. The computed SHA256 hash will be part of the cache key. +2. **Backup `/var/cache/apt/archives` permissions** - This step backs up the permissions associated to the `/var/cache/apt/archives` directory. So, after restoring the APT cache they can be restored to the original ones. +3. **Add write permissions for all to `/var/cache/apt/archives`** - This step sets the write permission to the `/var/cache/apt/archives`.
This is crucial because the [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) GitHub's action needs to be able to write to it. Without setting the correct write permission, a permission error is raised. +4. **Restore APT cache** - This step restores the APT cache. It uses the GitHub's [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action with the following parameters: + * **path** - A list of files, directories, or paths to restore - set to `/var/cache/apt/archives/*.deb`. + * **key** - An explicit key for a cache entry - set to the combination of three strings: + * *git_reference*, provided as an input to the action. + * A static part, `-apt-` + * The previously computed SHA256 hash of the APT requirements file. +5. **Restore original permissions to `/var/cache/apt/archives` and delete backup** - This step restores the original permissions to the `/var/cache/apt/archives` directory. Finally, the backup file is deleted. + +## Documentation + +### Inputs + +* **apt_requirements_file_path** - Required - Path to the APT requirements file. It will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. + +### Outputs + +* **cache-hit** - A boolean value which is true when APT cache is found in the GitHub's cache, false otherwise. 
diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml new file mode 100644 index 0000000..6ddaa7a --- /dev/null +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -0,0 +1,61 @@ +name: Composite action restore APT cache +description: Composite action to restore APT cache +inputs: + apt_requirements_file_path: + description: Path to the APT requirements file + required: true + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. + required: false + default: ${{ github.ref_name }} + +outputs: + cache-hit: + description: Whether the APT cache was found in the GitHub's cache or not. + value: ${{ steps.restore_apt_cache.outputs.cache-hit }} + + +runs: + using: "composite" + steps: + - name: Compute APT requirements file SHA256 hash + id: compute_apt_requirements_file_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.apt_requirements_file_path }} + + - name: Backup /var/cache/apt/archives permissions + id: backup_apt_cache_dir_permissions + run: | + apt_cache_dir_permissions_file=/tmp/apt_cache_dir_permissions.facl + echo "apt_cache_dir_permissions_file=$apt_cache_dir_permissions_file" >> $GITHUB_OUTPUT + sudo getfacl /var/cache/apt/archives > $apt_cache_dir_permissions_file + echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + echo "::debug::Created /var/cache/apt/archives permissions backup to $apt_cache_dir_permissions_file" + shell: bash + + # Vital to be able to restore cache + # If write permission is not set, a permissions error will be raised + - name: Add write permission for all to /var/cache/apt/archives + run: | + sudo chmod a+w /var/cache/apt/archives + echo "::debug::New permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + shell: bash + + - name: Restore APT cache + uses: actions/cache/restore@v4 + id: restore_apt_cache + with: + path: 
/var/cache/apt/archives/*.deb + key: ${{ inputs.git_reference }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} + + - name: Restore original permissions to /var/cache/apt/archives and delete backup + run: | + permissions_file=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} + sudo setfacl --restore="$permissions_file" + echo "::debug::Restored original permissions to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + if [[ -f "$permissions_file" ]]; then + sudo rm "$permissions_file" + echo "::debug::Correctly removed $permissions_file permissions backup file" + fi + shell: bash \ No newline at end of file diff --git a/.github/actions/apt_requirements/save_apt_cache/README.md b/.github/actions/apt_requirements/save_apt_cache/README.md new file mode 100644 index 0000000..4d8dca8 --- /dev/null +++ b/.github/actions/apt_requirements/save_apt_cache/README.md @@ -0,0 +1,22 @@ +# Composite action save APT cache + +This action saves the APT cache, almost always located at `/var/cache/apt/archives/*.deb` to the GitHub's cache. + +Combined with [**restore_apt_cache**](../restore_apt_cache/README.md), it helps save time by avoiding the download of APT requirements. + +The action is composed of two steps: + +1. **Compute APT requirements file SHA256 hash** - This step uses the [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) action to compute the SHA256 hash of the APT requirements file that will be part of the cache key. +2. **Save APT cache** - This step does the real caching on GitHub. The GitHub's [**cache/save**](https://github.com/actions/cache/blob/main/save/README.md) is used with the following parameters: + 1. **path** - A list of files, directories, or paths to cache - set to `/var/cache/apt/archives/*.deb` to save all `*.deb` files in APT cache. + 2. **key** - An explicit key for a cache entry - set to the combination of three strings: + 1. 
*git_reference*, provided as an input to the action. + 2. A static part, `-apt-` + 3. The previously computed SHA256 hash of the APT requirements file. + +## Documentation + +### Inputs + +* **apt_requirements_file_path** - Required - Path to the APT requirements file. It will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. diff --git a/.github/actions/apt_requirements/save_apt_cache/action.yml b/.github/actions/apt_requirements/save_apt_cache/action.yml new file mode 100644 index 0000000..af41cfd --- /dev/null +++ b/.github/actions/apt_requirements/save_apt_cache/action.yml @@ -0,0 +1,24 @@ +name: Composite action save APT cache +description: Composite action to save APT cache +inputs: + apt_requirements_file_path: + description: Path to the APT requirements file + required: true + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
+ required: false + default: ${{ github.ref_name }} + +runs: + using: "composite" + steps: + - name: Compute APT requirements file SHA256 hash + id: compute_apt_requirements_file_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.apt_requirements_file_path }} + - name: Save APT cache + uses: actions/cache/save@v4 + with: + path: /var/cache/apt/archives/*.deb + key: ${{ inputs.git_reference }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/.github/actions/misc/compute_files_hash/README.md b/.github/actions/misc/compute_files_hash/README.md new file mode 100644 index 0000000..f1c594f --- /dev/null +++ b/.github/actions/misc/compute_files_hash/README.md @@ -0,0 +1,18 @@ +# Composite action compute files hash + +This action computes a single SHA256 hash of one or more files. +Given a **space separated list of file paths**, a new file is created by concatenating all those files together. Then the SHA256 hash of the newly created file is computed and returned as the output. + +Before being joined together, each file is tested to ensure that it **exists** and that it is **a regular file**. + +This action is useful when saving/restoring a cache in which a unique key is required. As a matter of fact, the hash is used as a part of the cache key. + +## Documentation + +### Inputs + +* `file_paths` - Mandatory - Space separated list of file paths for which a single SHA256 hash will be computed. + +### Outputs + +* `computed_hash` - A SHA256 hash of the file obtained by joining (concatenating) all input files together. 
diff --git a/.github/actions/python_requirements/create_dev_requirements_file/README.md b/.github/actions/python_requirements/create_dev_requirements_file/README.md new file mode 100644 index 0000000..ae32be0 --- /dev/null +++ b/.github/actions/python_requirements/create_dev_requirements_file/README.md @@ -0,0 +1,13 @@ +# Composite action create Python dev requirements file + +This action creates the `requirements-dev.txt` file which will contain all **development dependencies**. + +As of today, the only development dependency supported is `coverage`. + +## Documentation + +### Inputs + +* **install_from** - Optional - The path used as working directory when creating the `requirements-dev.txt` file. It defaults to the current directory (i.e. `.`). +* **project_dev_requirements_file** - Optional - The path of a project `requirements-dev.txt`. This was designed in case development requirements other than coverage are required. If specified, the dependencies in the project `requirements-dev.txt` will be appended in the newly created `requirements-dev.txt`. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no custom `requirements-dev.txt`**. +* **use_coverage** - Optional - Whether to use coverage or not. It defaults to false. diff --git a/.github/actions/python_requirements/create_docs_requirements_file/README.md b/.github/actions/python_requirements/create_docs_requirements_file/README.md new file mode 100644 index 0000000..913192f --- /dev/null +++ b/.github/actions/python_requirements/create_docs_requirements_file/README.md @@ -0,0 +1,12 @@ +# Composite action create Python docs requirements file + +This action creates the `requirements-docs.txt` file. This is a Python requirements file that will contain all **dependencies required to build the documentation**. 
+ +## Documentation + +### Inputs + +* **install_from** - Optional - The path used as working directory when creating the `requirements-docs.txt` file. It defaults to the current directory (i.e. `.`). +* **project_docs_requirements_file** - Optional - The path of a project `requirements-docs.txt`. This was designed in case requirements to build documentation other than rstcheck, sphinx, sphinx_rtd_theme, sphinxcontrib-spelling and sphinxcontrib-django2 are required. If specified, the dependencies in the project `requirements-docs.txt` will be appended in the newly created `requirements-docs.txt`. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no custom `requirements-docs.txt`**. +* **django_settings_module** - Optional - Path to the Django settings file. It's used to make GitHub action aware of Django presence. In this case, `sphinxcontrib-django2` is also added to the newly created requirement file. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no Django settings file**. +* **check_docs_directory** - Optional - Path that will be used by rstcheck to check documentation. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **documentation won't be checked**. diff --git a/.github/actions/python_requirements/create_linter_requirements_file/README.md b/.github/actions/python_requirements/create_linter_requirements_file/README.md new file mode 100644 index 0000000..9be5aa9 --- /dev/null +++ b/.github/actions/python_requirements/create_linter_requirements_file/README.md @@ -0,0 +1,27 @@ +# Composite action create Python linter requirements file + +This action creates the `requirements-linters.txt` file which will contain all **linter dependencies** required by the CI. 
+The user can then choose which linters will be run, and hence written to the `requirements-linters.txt`, by the CI by setting some flags to true like *use_black*. + +As of today only the following linters are supported: + +* `autoflake` +* `bandit` +* `black` +* `flake8` +* `flake8-django` +* `isort` +* `pylint` +* `pylint-django` + +## Documentation + +### Inputs + +* **install_from** - Optional - The path used as working directory when creating the `requirements-linters.txt` file. It defaults to the current directory (i.e. `.`). +* `project_linter_requirements_file` - Optional - The path of a project `requirements-linters.txt`. This was designed in case requirements for linters other than `autoflake`, `bandit`, `black`, `flake8`, `flake8-django`, `isort`, `pylint` and `pylint-django` are required. If specified, the dependencies in the project `requirements-linters.txt` will be appended in the newly created `requirements-linters.txt`. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no custom `requirements-linters.txt`**. +* **django_settings_module** - Optional - Path to the Django settings file. It's used to make GitHub action aware of Django presence. In the case of a Django project, `flake8-django` and `pylint-django`, may be used and hence they will be added to the newly created requirements file. **Be careful: if a relative path is used this will depend on *install_from*.** Defaults to empty strings, and hence **no Django settings file**. +* **use_autoflake** - Optional - Flag to state whether to use or not `autoflake` linter. It defaults to false. +* **use_bandit** - Optional - Flag to state whether to use or not `bandit` linter. It defaults to false. +* **use_flake8** - Optional - Flag to state whether to use or not `flake8` linter. It defaults to false. +* **use_pylint** - Optional - Flag to state whether to use or not `pylint` linter. It defaults to false. 
diff --git a/.github/actions/python_requirements/create_virtualenv/README.md b/.github/actions/python_requirements/create_virtualenv/README.md new file mode 100644 index 0000000..8f3361a --- /dev/null +++ b/.github/actions/python_requirements/create_virtualenv/README.md @@ -0,0 +1,20 @@ +# Composite action create Python virtual environment + +This GitHub action creates a Python virtual environment using Python's `venv` module. + +When the *activate_only* flag is set to true, the virtual environment at *virtualenv_path* will only be activated—**no creation will take place**. + +NOTE: + +To activate a Python virtual environment, the `activate` script is often used. +However, in a GitHub Action environment, this is not enough because environment variables are "lost" at the end of the Action. For this we need to do two things: + +1. Append the `VIRTUAL_ENV` environment variable to the `GITHUB_ENV` environment file. The [`GITHUB_ENV`](https://docs.github.com/en/enterprise-cloud@latest/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#setting-an-environment-variable) file makes environment variables available to any subsequent steps in a workflow job. Finally, it's important to note that `VIRTUAL_ENV` variable is created by the `activate` script and contains the path to the virtual environment. +2. Prepend the virtual environment's `bin` path to the system PATH. To allow also any subsequent steps in a workflow to be able to use it, [`GITHUB_PATH`](https://docs.github.com/en/enterprise-cloud@latest/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#adding-a-system-path) is employed. + +## Documentation + +### Inputs + +* **virtualenv_path** - Optional - The path where the virtual environment will be created. It defaults to `.venv`. +* **activate_only** - Optional - Flag that states whether to only activate the virtual environment. 
If false, a new virtual environment will be created before being activated. It defaults to false. \ No newline at end of file diff --git a/.github/actions/python_requirements/restore_pip_cache/README.md b/.github/actions/python_requirements/restore_pip_cache/README.md new file mode 100644 index 0000000..92a2a2f --- /dev/null +++ b/.github/actions/python_requirements/restore_pip_cache/README.md @@ -0,0 +1,41 @@ +# Composite action restore pip cache + +This action restores the pip download cache from GitHub's cache. + +The action is composed of four steps: + +1. **Generate random UUID** - This step computes a random UUID, using the shell command `uuidgen`, which will be part of the cache key. Since pip cache will always be restored when a virtual environment is not found on GitHub's cache, a random UUID is required to generate a cache miss. +2. **Get pip cache directory** - This step retrieves the path to the pip cache. If *custom_pip_cache_path* is not an empty string, it will be used as pip cache path. Otherwise, the pip cache will be computed using `pip cache dir`. +3. **Restore pip cache** - This step performs the heavy lifting of the restoring. Using GitHub's [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action, the cache is restored using a **partial match**. This is performed by setting the following [inputs](https://github.com/actions/cache/tree/main/restore#inputs): + 1. **key** - an explicit key for a cache entry - will be set to a random UUID which will always trigger a cache miss. + 2. **path** - a list of files, directories, paths to restore - will be set to the pip download cache path. + 3. **restore-keys** - an ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key - will be set to `-pip-cache-` to restore the most recent pip cache for the chosen git reference. +4. 
**Explain cache output** - This step analyzes the results of the [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action and sets the *real_cache_hit* environment variable to true if there was a match, false otherwise. This is necessary because, in the case of a **partial match**, the *cache-hit*, output of [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md), will be false. Instead, we use the `cache-matched-key`, another output of [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md), which contains a reference for both **partial** and full matches, but will be empty in the case of a cache miss. + +NOTE: + +This action, despite seeming a bit unusual, is correct because GitHub does not allow cache updates or overwrites. + +Let's think about a real-world scenario: + +A user updates the requirements file. + +In this case our query to GitHub's cache for the previously cached virtual environment will **always** miss. This happens because changing the requirements file results in a new SHA256 hash, so the cache key changes. + +Thus, we aim to restore the pip cache to at least *mitigate* the impact of the changes in the requirements. Specifically, we want to save time by avoiding the download of packages that did not change. + +Next, we try to query the GitHub's cache for the previously cached pip cache. However, there are a few issues: + +1. We cannot use the SHA256 of the requirements file because it has changed, leading to cache misses. +2. We cannot create a cache key without a random component because, as said earlier, GitHub does not allow overwriting or updating of a cache item. For example, a cache key like `develop-pip-cache-` would generate an error when attempting to save a new cache if one already exists with the same name. + +## Documentation + +### Inputs + +* **custom_pip_cache** - Optional - Path to the pip cache. It can be used for setting a custom pip cache path. 
It defaults to an empty string. In this case, the pip cache path will be computed using `pip cache dir`. More information regarding the previous command is available [here](https://pip.pypa.io/en/stable/cli/pip_cache/#description) +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. + +### Outputs + +* **cache-hit** - A boolean value which states whether pip cache was found on GitHub's cache or not. diff --git a/.github/actions/python_requirements/restore_virtualenv/README.md b/.github/actions/python_requirements/restore_virtualenv/README.md new file mode 100644 index 0000000..e40a3c1 --- /dev/null +++ b/.github/actions/python_requirements/restore_virtualenv/README.md @@ -0,0 +1,30 @@ +# Composite action restore Python virtual environment + +This action restores a Python virtual environment from GitHub's cache. + +Combined with [**save_virtualenv**](../save_virtualenv/README.md), **it helps save time by avoiding the installation of Python requirements**. + +The action is composed of three steps: + +1. **Compute requirements files SHA256 hash** - This step uses [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) action to compute a single SHA256 hash of the files described by the *requirements_paths*. The computed SHA256 hash will be part of the cache key. +2. **Restore virtual environment** - This step does the heavy lifting of restoring the virtual environment from GitHub's cache. It uses the GitHub's [**cache/restore**](https://github.com/actions/cache/blob/main/restore/README.md) action with the following parameters: + * **path** - A list of files, directories, or paths to restore - set to the virtual environment path input variable *virtual_environment_path*. 
+ * **key** - An explicit key for a cache entry - set to the combination of three strings: + * *git_reference*, provided as an input to the action. + * A static part, `-venv-` + * The previously computed SHA256 hash of the requirements files. +3. **Activate restored virtual environment** - If the Python virtual environment was found in the GitHub's cache, it needs to be activated. This is performed using [**python_requirements/create_virtualenv**](../create_virtualenv/README.md) action with the following parameters: + * **virtualenv_path** - set to the Python virtual environment path. + * **activate_only** - set to true because it doesn't need to be created. + +## Documentation + +### Inputs + +* **virtual_environment_path** - Optional - Path where the virtual environment is located. It may be used to provide a custom path for the virtual environment. It defaults to `.venv`. +* **requirements_paths** - Required - A space separated list of requirements file paths. They will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. + +### Outputs + +* **cache-hit** - A boolean value which is true when virtual environment is found in the GitHub's cache, false otherwise. diff --git a/.github/actions/python_requirements/save_pip_cache/README.md b/.github/actions/python_requirements/save_pip_cache/README.md new file mode 100644 index 0000000..e3950a0 --- /dev/null +++ b/.github/actions/python_requirements/save_pip_cache/README.md @@ -0,0 +1,22 @@ +# Composite action save pip cache + +This action saves the pip download cache. 
+ +Every time a user runs `pip install`, pip downloads the package and all its dependencies. The packages are saved in a directory which, by default, is located at `~/.cache/pip`. +Saving this cache in GitHub's cache allows us to save time when installing those packages. As a matter of fact, before installing packages, pip's cache can be restored using [**restore_pip_cache**](../restore_pip_cache/README.md) action. + +The action is composed of three steps: + +1. **Generate random UUID** - This step computes a random UUID, using shell command `uuidgen`, which will be part of the cache key. The uniqueness of the UUID ensures that there will be no collisions between cache keys, which is crucial because **GitHub won't allow the creation of two caches with the same key** (cache update/overwrite **is not supported**). +2. **Get pip cache directory** - This step retrieves the path to the pip cache. If *custom_pip_cache_path* is not an empty string, it will be used as pip cache path. Otherwise, the pip cache will be computed using `pip cache dir`. +3. **Save pip cache** - This step performs the heavy lifting of the caching. Using GitHub's [**cache/save**](https://github.com/actions/cache/blob/main/save/README.md) action, the cache is saved with a key composed of: + 1. The git reference input, *git_reference* + 2. A static part, `pip-cache` + 3. The previously computed UUID + +## Documentation + +### Inputs + +* **custom_pip_cache** - Optional - Path to the pip cache. It can be used for setting a custom pip cache path. It defaults to an empty string. In this case, the pip cache path will be computed using `pip cache dir`. More information regarding the previous command is available [here](https://pip.pypa.io/en/stable/cli/pip_cache/#description) +* **git_reference** - Optional - A git reference that will be used to build the cache key. 
It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. diff --git a/.github/actions/python_requirements/save_virtualenv/README.md b/.github/actions/python_requirements/save_virtualenv/README.md new file mode 100644 index 0000000..19d9ab5 --- /dev/null +++ b/.github/actions/python_requirements/save_virtualenv/README.md @@ -0,0 +1,23 @@ +# Composite action save Python virtual environment + +This action saves a Python virtual environment to GitHub's cache. + +Combined with [**restore_virtualenv**](../restore_virtualenv/README.md), **it helps save time by avoiding the installation of Python requirements**. + +The action is composed of two steps: + +1. **Compute requirements files SHA256 hash** - This step uses [**misc/compute_files_hash**](../../misc/compute_files_hash/README.md) to compute a single SHA256 hash of the files described by the *requirements_paths*. The computed SHA256 hash will be part of the cache key. +2. **Cache virtual environment** - This step does the heavy lifting of saving the virtual environment to GitHub's cache. It uses the GitHub's [**cache/save**](https://github.com/actions/cache/blob/main/save/README.md) action with the following parameters: + 1. **path** - A list of files, directories, or paths to cache - set to the virtual environment path input variable *virtual_environment_path*. + 2. **key** - An explicit key for a cache entry - set to the combination of three strings: + 1. *git_reference*, provided as an input to the action. + 2. A static part, `-venv-` + 3. The previously computed SHA256 hash of the requirements files. + +## Documentation + +### Inputs + +* **virtual_environment_path** - Optional - Path where the virtual environment is located. It may be used to provide a custom path for the virtual environment. It defaults to `.venv`. 
+ +* **requirements_paths** - Required - A space separated list of requirements file paths. They will be used to compute a SHA256 hash used in the cache key. +* **git_reference** - Optional - A git reference that will be used to build the cache key. It defaults to `github.ref_name` which is a context variable containing **the short ref name of the branch or tag that triggered the workflow run**. For example it may be `feature-branch-1` or, for pull requests, `/merge`. diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000..618ce64 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,215 @@ +# Workflows + +## [Reusable detect changes workflow](_detect_changes.yml) + +This sub workflow detects and enumerates the changes between two branches. + +It is composed of five steps: + +1. **Check out PR target branch** - This step checks out the latest commit of the PR target branch for the current repository. This workflow was designed to detect changes when a PR to a target branch was created. Therefore, the latest commit of the target branch must be checked out as the first step. To achieve this, GitHub's [**checkout**](https://github.com/actions/checkout) action is used with the following parameters: + 1. **ref** - The branch, tag or SHA to checkout - It is set to `github.base_ref`, which corresponds to the **PR target branch**. +2. **Check out source branch latest commit** - This step checks out the latest commit of the source branch on top of the previous one. To do so, GitHub's [**checkout**](https://github.com/actions/checkout) action is used with the following parameters: + 1. **clean** - Whether to execute `git clean -ffdx && git reset --hard HEAD` before fetching - It is set to false, which means **do not delete untracked files**. +3. **Generate summary** - This step creates the title for the action summary. As a matter of fact, the detected changes will be reported below the title in the summary section. 
The step is performed only if one or both *backend_directories* and *frontend_directories* inputs are not empty. +4. **Generate diffs for backend** - This step detects and enumerates the files that changed between the two branches. This is performed using [`git diff`](https://git-scm.com/docs/git-diff) command. Specifically, the code instructs git to show the changes in the *backend_directories* relative to `origin/` (the target branch). During this process, the [**pathspec**](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) is used to exclude files or directories specified in the *backend_exclusions* input. The changes are then enumerated and output through the *backend* variable. +5. **Generate diffs for frontend** - This step follows the same pattern as the **Generate diffs for backend** step but for the frontend directories. + +### Documentation + +#### Inputs + +* **backend_directories** - Optional - Space separated list of backend directories to check for changes. By default, it is set to an empty string. +* **backend_exclusions** - Optional - Space separated list of backend files or directories to **exclude** when checking for changes. Globs are supported. By default, it is set to an empty string. +* **frontend_directories** - Optional - Space separated list of frontend directories to check for changes. By default, it is set to an empty string. +* **frontend_exclusions** - Optional - Space separated list of frontend files or directories to **exclude** when checking for changes. Globs are supported. By default, it is set to an empty string. +* **ubuntu_version** - Optional - The Ubuntu version to run the workflow against. By default, it is set to `latest`. + +#### Outputs + +* **backend** - The number of backend files that have changed. +* **frontend** - The number of frontend files that have changed. 
+ +## [Reusable node tests workflow](_node.yml) + +This sub workflow installs node dependencies and runs frontend linters and tests. + +It is composed of nine steps: + +1. **Check out latest commit for current branch** - This step checks out the latest commit for the current branch of the repository. To do so, it uses GitHub's [**checkout**](https://github.com/actions/checkout) action with no parameters. +2. **Set up Node.js** - This step sets Node.js up downloading binaries and project's dependencies. This is done using the GitHub's [**setup-node**](https://github.com/actions/setup-node) action which also allows to cache and restore the project dependencies. It's used with the following parameters: + 1. **node-version** - Node.js version to use - It is set according to *node_version* input variable. + 2. **cache** - Which package manager is used to install and cache packages - It is set to `npm`. + 3. **cache-dependency-path** - Path to the dependency file: `package-lock.json`, `yarn.lock` etc. It is set to `/package-lock.json`, where *working_directory* is the input variable. +3. **Add dependencies** - This step adds additional dependencies to the `package-lock.json` file. Specifically, these packages are added to the **devDependencies** part of the aforementioned file. Which packages will be added is chosen according to input variables: + 1. *use_jest* + 2. *use_react* + 3. *use_eslint* + 4. *use_prettier* + 5. *use_stylelint* +4. **Install packages** - This step installs all missing packages from the dependency file in the directory specified by the *working_directory* input variable. +5. **Run linters** - This step uses [**node_linter**](../actions/node_linter/action.yml) action to run linters against the frontend source code. +6. **Check packages licenses** - This step uses [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker) to check the licenses used by the project requirements. +7. 
**Run CodeQL** - This step uses [**codeql**](../actions/codeql/action.yml) action to run CodeQL to discover vulnerabilities across the codebase. +8. **Run custom command** - This step is performed only if the input variable *custom_command* is not empty. The step simply runs the bash command described in the previously mentioned input variable in the working directory specified by the *working_directory* input variable. +9. **Run jest tests** - This step runs Jest tests if the input variable *use_jest* is set to true. Finally, if *use_coverage* and *upload_coverage* are set to true, a coverage report is generated and uploaded. + +### Documentation + +#### Inputs + +* **node_versions** - Required - An array of Node.js versions to use. +* **working_directory** - Required - Path to the `package.json` file. +* **check_packages_licenses** - Optional - Whether to check npm packages licenses or not. By default it is set to true. +* **use_jest** - Optional - Whether to use Jest test suite or not. By default it is set to false. +* **use_react** - Optional - Whether react is used by the project or not. By default it is set to false. +* **use_eslint** - Optional - Whether to use ESlint linter or not. By default it is set to true. +* **use_prettier** - Optional - Whether to use Prettier formatter or not. By default it is set to true. +* **use_stylelint** - Optional - Whether to use Stylelint linter or not. By default it is set to true. +* **use_coverage** - Optional - Whether to use Coverage or not. To work, it also requires *use_jest* to be true. By default it is set to false. +* **upload_coverage** - Optional - Whether to upload coverage report to GitHub. By default it is set to false. +* **run_codeql** - Optional - Whether to run CodeQL against the codebase. By default it is set to false. +* **custom_command** - Optional - A custom bash command to be run by the workflow. By default it is set to an empty string. 
+ +* **max_timeout** - Optional - A maximum amount of minutes allowed for the workflow to run. By default it is set to 30. +* **ubuntu_version** - Optional - The Ubuntu version to run the workflow against. By default it is set to `latest`. + +## [Reusable python linter workflow](_python.yml) + +This sub workflow runs Python linters and tests against the codebase. + +It is composed of one job: + +1. **python** - This job is composed of thirty-one steps: + 1. **Check out latest commit** - Checks out the latest commit on the current branch of the repository using the GitHub's [**checkout**](https://github.com/actions/checkout) action. + 2. **Set up Python** - Sets up Python on the runner machine using GitHub's [**setup-python**](https://github.com/actions/setup-python) action with the following parameter: + 1. **python-version** - Which Python version to use - It is set according to the *python_versions* input variable. + 3. **Inject stuff to environment** - This step adds a few environment variables to the system's environment. Specifically: + 1. If *django_settings_module* is set, **PYTHONPATH** and **DJANGO_SETTINGS_MODULE** will be added to the runner's environment. + 2. If *run_codeql* is true, **CODEQL_PYTHON** will be added to the runner's environment. + 4. **Restore APT cache related to PR event** - This step will try to restore the APT cache related to the PR event using [**restore_apt_cache**](../actions/apt_requirements/restore_apt_cache/README.md) with the following parameter: + 1. **apt_requirements_file_path** - Path to the APT requirements file - It is set to the *packages_path* input variable. + 5. **Restore APT cache related to target branch** - This step will try to restore the APT cache related to the target branch (of the PR) using [**restore_apt_cache**](../actions/apt_requirements/restore_apt_cache/README.md) only if **Restore APT cache related to PR event** produces a cache miss. It is run with the following parameter: + 1. 
**apt_requirements_file_path** - Path to the APT requirements file - It is set to the *packages_path* input variable. + 2. **git_reference** - A git reference (name of the branch, reference to the PR) that will be used to build the cache key - It is set to the target branch. + 6. **Restore APT repositories** - If both PR event and target branch APT cache restore attempt resulted in a cache miss, the APT repositories list is refreshed using `sudo apt-get update`. + 7. **Install APT requirements** - This step installs APT requirements listed in the *packages_path* requirements file. **Since they are not required, recommended packages are not downloaded**. + 8. **Save APT cache related to PR event** - When the attempt to restore the APT cache related to the PR event results in a cache miss, the newly populated APT cache is saved to GitHub. This is performed using [**save_apt_cache**](../actions/apt_requirements/save_apt_cache/README.md) action with the following parameter: + 1. **apt_requirements_file_path** - Path to the APT requirements file - It is set to the *packages_path* input variable. + 9. **Create linter requirements file** - This step creates the linter requirements file using the [**create_linter_requirements_file**](../actions/python_requirements/create_linter_requirements_file/README.md) action. + 10. **Create dev requirements file** - This step creates the development requirements file using the [**create_dev_requirements_file**](../actions/python_requirements/create_dev_requirements_file/README.md) action. + 11. **Create docs requirement file** - This step creates the documentation requirements file using the [**create_docs_requirements_file**](../actions/python_requirements/create_docs_requirements_file/README.md) action. + 12. 
**Restore Python virtual environment related to PR event** - This step attempts to restore the Python virtual environment for the PR using the [**restore_python_virtualenv**](../actions/python_requirements/restore_virtualenv/README.md) action. + 13. **Restore Python virtual environment related to target branch** - If the attempt to restore the Python virtual environment for the PR results in a cache miss, an attempt to restore the Python virtual environment for the target branch is made using the [**restore_python_virtualenv**](../actions/python_requirements/restore_virtualenv/README.md) action. + 14. **Create Python virtual environment** - If both attempts to restore the Python virtual environment, for the PR and for the target branch, result in a cache miss, a Python virtual environment is created using the [**create_virtualenv**](../actions/python_requirements/create_virtualenv/README.md) action. + 15. **Restore pip cache related to PR event** - If both attempts to restore the Python virtual environment, for the PR and for the target branch, result in a cache miss, an attempt to restore the pip cache for the PR event is made using the [**restore_pip_cache**](../actions/python_requirements/restore_pip_cache/README.md) action. + 16. **Restore pip cache related to target branch** - If both attempts to restore the Python virtual environment, for the PR and for the target branch, as well as the pip cache for the PR, result in a cache miss, an attempt to restore the pip cache for the target branch is made using the [**restore_pip_cache**](../actions/python_requirements/restore_pip_cache/README.md) action. + 17. **Install project requirements** - If both attempts to restore the Python virtual environment, for the PR event and the target branch, result in a cache miss, project requirements are installed from the working directory specified by the *install_from* input variable. + 18. 
**Install other requirements** - If the attempt to restore the Python virtual environment for the PR event results in a cache miss, developer, linters and documentation requirements are installed from the working directory specified by *working_directory* input variable. + 19. **Check requirements licenses** - If the input variable *check_requirements_licenses* is set to true and the attempt to restore the Python virtual environment related to the PR event results in a cache miss, this step performs the requirements licenses check using [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker). + 20. **Print wrong licenses** - If the output of **Check requirements licenses** is `failure`, the list of licenses for which the check failed will be returned. + 21. **Save Python virtual environment related to PR event** - If the attempt to restore the Python virtual environment resulted in a cache miss, the Python virtual environment is saved for the PR event using the [*save_virtualenv*](../actions/python_requirements/save_virtualenv/README.md) action with the following parameter: + 1. **requirements_paths** - A space separated list of requirements file paths - It is set to the combination of *requirements_path*, `requirements-linters.txt`, `requirements-dev.txt` and `requirements-docs.txt` joined by spaces. + 22. **Save pip cache related to PR event** - If both attempts to restore the Python virtual environment and the pip cache related to the PR resulted in a cache miss, the pip cache is saved for the PR event using the [*save_pip_cache*](../actions/python_requirements/save_pip_cache/README.md) action. + 23. **Run linters** - If one of the following input variables: *use_black*, *use_isort*, *use_flake8*, *use_pylint*, *use_bandit* and *use_autoflake* is true, this step executes the linters against the codebase in the working directory specified by the *working_directory* variable. + 24. 
**Run CodeQL** - If the *run_codeql* input variable is true, this step runs CodeQL against the codebase using the [**codeql**](../actions/codeql/action.yml) action in the working directory specified by the *working_directory* variable. + 25. **Build Docs** - If the *check_docs_directory* input variable is set, this step executes `rstcheck` to ensure that the documentation in *check_docs_directory* is valid. Finally, the documentation is built using `sphinx`. + 26. **Start services** - If one or more of the following input variables: *use_postgres*, *use_elastic_search*, *use_memcached*, *use_redis*, *use_rabbitmq* and *use_mongo* are true, this step creates the Docker container for the service using the [**services**](../actions/services/action.yml) action. Additional parameters, such as *postgres_db* or *elasticsearch_version* can also be provided to the aforementioned action. + 27. **Start celery worker** - If the *use_celery* input variable is true, a Celery worker is created for the *celery_app* application. The `celery` command is executed in the working directory specified by the *working_directory* input variable. + 28. **Run custom command** - If the *custom_command* input variable is not empty, the command defined by the variable is executed in the working directory specified by the *working_directory* input variable. + 29. **Check migrations** - If *check_migrations* is true and *django_settings_module* is not empty, this step will perform a dry run of `django-admin makemigrations` to ensure that the migrations are valid. + 30. **Run unittest** - This step runs Python tests against the codebase in the directory described by the *working_directory* input variable. Additionally, according to *tags_for_manual_tests* and *tags_for_slow_tests* variables, some tests will be excluded from the run. + 31. 
**Create coverage output** - If *use_coverage* and *upload_coverage* are set to true, this step produces a coverage report of the codebase and uploads it to GitHub. The *working_directory* input variable is used to determine the directory in which coverage should be run. + +### Documentation + +#### Inputs + +* **python_versions** - Required - Python versions used by this workflow in the form of a JSON array. +* **ubuntu_version** - Optional - Ubuntu version to run workflow against. By default, it is set to `latest`. +* **working_directory** - Required - Directory in which to run linters. +* **requirements_path** - Required - Path to the requirements file of the Python project. +* **install_from** - Optional - Directory where all installation commands will be run. By default, it is set to `.`. +* **packages_path** - Optional - Path to the APT requirements file of the Python project. By default, it is set to an empty string. +* **env** - Optional - A JSON object containing a set of environment variables to be added to the system's environment. By default, it is set to an empty JSON object `{}`. +* **max_timeout** - Optional - Maximum amount of time (in minutes) the workflow is allowed to run. By default, it is set to `30`. +* **use_black** - Optional - Whether to use black formatter. By default, it is set to `false`. +* **use_isort** - Optional - Whether to use isort formatter. By default, it is set to `false`. +* **use_autoflake** - Optional - Whether to use autoflake linter. By default, it is set to `false`. +* **use_bandit** - Optional - Whether to use bandit linter. By default, it is set to `false`. +* **use_flake8** - Optional - Whether to use flake8 linter. By default, it is set to `false`. +* **use_pylint** - Optional - Whether to use pylint linter. By default, it is set to `false`. +* **use_coverage** - Optional - Whether to use coverage. By default, it is set to `false`. +* **coverage_config_path** - Optional - Path to the coverage configuration file. 
By default, it is set to `.coveragerc`. +* **upload_coverage** - Optional - Whether to upload coverage report to GitHub. To work, it needs *use_coverage* to be true. By default, it is set to `false`. +* **run_codeql** - Optional - Whether to run CodeQL against codebase. By default, it is set to `false`. +* **use_celery** - Optional - Whether to create a Celery container. By default, it is set to `false`. +* **use_elastic_search** - Optional - Whether to create an Elasticsearch container. By default, it is set to `false`. +* **use_memcached** - Optional - Whether to create a Memcached container. By default, it is set to `false`. +* **use_mongo** - Optional - Whether to create a MongoDB container. By default, it is set to `false`. +* **use_postgres** - Optional - Whether to create a PostgresDB container. By default, it is set to `false`. +* **use_rabbitmq** - Optional - Whether to create a RabbitMQ container. By default, it is set to `false`. +* **use_redis** - Optional - Whether to create a Redis container. By default, it is set to `false`. +* **celery_app** - Optional - A Celery application name. Requires *use_celery* to be true. By default, it is set to an empty string. +* **celery_queues** - Optional - A comma separated list of Celery queues. Requires *use_celery* to be true. By default, it is set to `default`. +* **elasticsearch_version** - Optional - Elasticsearch's container version. By default, it is set to `latest`. +* **elasticsearch_port** - Optional - Elasticsearch's container exposed port. By default, it is set to `9200`. +* **memcached_version** - Optional - Memcached's container version. By default, it is set to `latest`. +* **mongo_version** - Optional - MongoDB's container version. By default, it is set to `latest`. +* **postgres_db** - Optional - PostgresDB database name. Requires *use_postgres* to be true. By default, it is set to `db`. +* **postgres_user** - Optional - PostgresDB user name. Requires *use_postgres* to be true. 
By default, it is set to `user`. +* **postgres_password** - Optional - PostgresDB password. Requires *use_postgres* to be true. By default, it is set to `password`. +* **postgres_version** - Optional - PostgresDB's container version. Requires *use_postgres* to be true. By default, it is set to `latest`. +* **rabbitmq_version** - Optional - RabbitMQ's container version. Requires *use_rabbitmq* to be true. By default, it is set to `latest`. +* **redis_version** - Optional - Redis' container version. Requires *use_redis* to be true. By default, it is set to `latest`. +* **django_settings_module** - Optional - Path to the Django settings file. By default, it is set to an empty string. +* **check_migrations** - Optional - Whether to check that the project's migrations are valid. Requires *django_settings_module* to be set. By default, it is set to `false`. +* **check_requirements_licenses** - Optional - Whether to check that the requirements license is valid. Requires *django_settings_module* to be set. By default, it is set to `true`. +* **ignore_requirements_licenses_regex** - Optional - A regex that describes which directories should be ignored when checking the validity of requirements licenses. By default, it is set to `uWSGI.*|lunardate.*|.*QuokkaClient.*|pyquokka.*`. +* **tags_for_slow_tests** - Optional - A space separated list of tags for tests that will only be run on the master/main branch. **Works only for Django projects**. By default, it is set to `slow`. +* **tags_for_manual_tests** - Optional - A space separated list of tags for tests that will only be run **manually** (CI will ignore them). **Works only for Django projects**. By default, it is set to `manual`. +* **custom_command** - Optional - A custom bash command to run. By default, it is set to an empty string. +* **check_docs_directory** - Optional - Path to the documentation directory in which `rstcheck` will be run to check documentation files. By default, it is set to an empty string. 
+* **check_dockerfile** - Optional - Path to a Dockerfile to be checked. **Warning: if set it may significantly increase the action time**. By default, it is set to an empty string. + +## [Create APT cache](create_apt_cache.yaml) + +This workflow is run in the event of **a push on branches *main*, *master*, *develop*, *dev***. Specifically, it is triggered only when the APT requirements file is updated. + +The workflow is composed of a single job: + +1. **Create cache for APT dependencies** - This job, as described by its name, creates a cache for APT dependencies and stores it on GitHub. It is composed of four steps: + 1. **Check out latest commit on current branch** - This step checks out the latest commit on the current branch of the repository. + 2. **Install APT dependencies** - This step refreshes APT repositories and then install the project dependecies. This action is required to produce the APT cache that will be saved later. + 3. **Compute APT dependencies file SHA256 hash** - This step computes the SHA256 of the APT dependency file that will be used as cache key. + 4. **Save APT cache** - This step saves APT cache on GitHub. The GitHub's [**cache/save**](https://github.com/actions/cache/tree/main/save) action is used. + +## [Create Python cache](create_python_cache.yaml) + +This workflow is run in the event of **a push on branches *main*, *master*, *develop*, *dev***. Specifically, it is triggered only when the Python requirements file is updated. + +The workflow is composed of a single job: + +1. **Create cache for Python dependencies** - This job, as described by its name, creates a cache for Python dependencies and stores it on GitHub. It is composed of four steps: + 1. **Check out latest commit** - This step checks out the latest commit on the current branch for the repository. + 2. **Set up Python** - This step install Python on the runner. + 3. 
**Set up Python virtual environment** - This step uses [**create_virtualenv**](../actions/python_requirements/create_virtualenv/README.md) action to create a Python virtual environment. + 4. **Install Python dependencies** - This step installs Python requirements to produce the final virtual environment that will be cached. Also, installing the Python dependencies creates the pip cache. + 5. **Save pip cache** - This step uses [**save_pip_cache**](../actions/python_requirements/save_pip_cache/README.md) action to save pip's download cache on GitHub. + 6. **Create virtual environment cache** - This step uses [**save_virtualenv**](../actions/python_requirements/save_virtualenv/README.md) action to save virtual environment on GitHub's cache. + +## [CI](pull_request_automation.yml) + +This workflow runs in the case of a **pull request on branches *master*, *main*, *develop*, *dev*** and it's the core CI workflow. + +It is composed of three jobs: + +1. **detect-changes** - This job detects and enumerates changes to backend and/or frontend files. To do so, it uses the [**_detect_changes**](_detect_changes.yml) workflow. +2. **node** - If any changes to the frontend files are found, [**_node**](_node.yml) workflow is run. +3. **python** - If any changes to the backend files are found, [**_python**](_python.yml) workflow is run. 
+ +## [Release and publish](release.yml) + +TODO + +## [Reusable release and tag workflow](_release_and_tag.yml) + +TODO From 8f17cd7d9d98f03772e040cea9c5fc893857b569 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 14:35:22 +0100 Subject: [PATCH 08/15] fixed missing shell in restore_apt_cache --- .github/actions/apt_requirements/restore_apt_cache/action.yml | 1 + actions/apt_requirements/restore_apt_cache/action.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml index 6ddaa7a..a3997c2 100644 --- a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -31,6 +31,7 @@ runs: sudo getfacl /var/cache/apt/archives > $apt_cache_dir_permissions_file echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" echo "::debug::Created /var/cache/apt/archives permissions backup to $apt_cache_dir_permissions_file" + shell: bash # Vital to be able to restore cache # If write permission is not set, a permissions error will be raised diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml index 6ddaa7a..a3997c2 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -31,6 +31,7 @@ runs: sudo getfacl /var/cache/apt/archives > $apt_cache_dir_permissions_file echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" echo "::debug::Created /var/cache/apt/archives permissions backup to $apt_cache_dir_permissions_file" + shell: bash # Vital to be able to restore cache # If write permission is not set, a permissions error will be raised From f3633a884afb5069b915f08f68359d6cec65fc8e Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 
28 Mar 2025 14:37:03 +0100 Subject: [PATCH 09/15] fixed another bug :) --- .github/actions/apt_requirements/restore_apt_cache/action.yml | 2 +- actions/apt_requirements/restore_apt_cache/action.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml index a3997c2..91ae998 100644 --- a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -22,7 +22,7 @@ runs: id: compute_apt_requirements_file_sha256_hash uses: ./.github/actions/misc/compute_files_hash with: - file_paths: ${{ inputs.requirements_file }} + file_paths: ${{ inputs.apt_requirements_file_path }} - name: Backup /var/cache/apt/archives permissions id: backup_apt_cache_dir_permissions diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml index a3997c2..91ae998 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -22,7 +22,7 @@ runs: id: compute_apt_requirements_file_sha256_hash uses: ./.github/actions/misc/compute_files_hash with: - file_paths: ${{ inputs.requirements_file }} + file_paths: ${{ inputs.apt_requirements_file_path }} - name: Backup /var/cache/apt/archives permissions id: backup_apt_cache_dir_permissions From bd4d93c05760b60cb77c1b6d646065a8259b700e Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 14:44:00 +0100 Subject: [PATCH 10/15] fixed another bug --- .../actions/apt_requirements/restore_apt_cache/action.yml | 7 ++++--- .github/workflows/README.md | 2 +- actions/apt_requirements/restore_apt_cache/action.yml | 7 ++++--- workflows/README.md | 2 +- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml 
b/.github/actions/apt_requirements/restore_apt_cache/action.yml index 91ae998..89a2f07 100644 --- a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -27,10 +27,11 @@ runs: - name: Backup /var/cache/apt/archives permissions id: backup_apt_cache_dir_permissions run: | - echo "apt_cache_dir_permissions_file=/tmp/apt_cache_dir_permissions.facl" > $GITHUB_OUTPUT - sudo getfacl /var/cache/apt/archives > $apt_cache_dir_permissions_file + PERMISSIONS_FILE_PATH="/tmp/apt_cache_dir_permissions.facl" + echo "apt_cache_dir_permissions_file=$PERMISSIONS_FILE_PATH" > $GITHUB_OUTPUT + sudo getfacl /var/cache/apt/archives > $PERMISSIONS_FILE_PATH echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" - echo "::debug::Created /var/cache/apt/archives permissions backup to $apt_cache_dir_permissions_file" + echo "::debug::Created /var/cache/apt/archives permissions backup to $PERMISSIONS_FILE_PATH" shell: bash # Vital to be able to restore cache diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 618ce64..0d16395 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -48,7 +48,7 @@ It is composed of nine steps: 5. *use_stylelint* 4. **Install packages** - This step install all missing packages from the dependency file in the directory specified by the *working_directory* input variable. 5. **Run linters** - This step uses [**node_linter**](../actions/node_linter/action.yml) action to run linters against the frontend source code. -6. **Check packages licenses** - This step uses [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker) to check the licenses used by the project requirements. +6. 
**Check packages licenses** - This step uses [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker) to check the licenses used by the project requirements. 7. **Run CodeQL** - This step uses [**codeql**](../actions/codeql/action.yml) action to run CodeQL to discover vulnerabilities across the codebase. 8. **Run custom command** - This step is performed only if the input variable *custom_command* is not empty. The step simply run the bash command described in the previously mentioned input variable in the working directory specified by the *working_directory* input variable. 9. **Run jest tests** - This step runs Jest tests if the input variable *use_jest* is set to true. Finally, if *use_coverage* and *upload_coverage* are set to true, a coverage report is generated and uploaded. diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml index 91ae998..89a2f07 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -27,10 +27,11 @@ runs: - name: Backup /var/cache/apt/archives permissions id: backup_apt_cache_dir_permissions run: | - echo "apt_cache_dir_permissions_file=/tmp/apt_cache_dir_permissions.facl" > $GITHUB_OUTPUT - sudo getfacl /var/cache/apt/archives > $apt_cache_dir_permissions_file + PERMISSIONS_FILE_PATH="/tmp/apt_cache_dir_permissions.facl" + echo "apt_cache_dir_permissions_file=$PERMISSIONS_FILE_PATH" > $GITHUB_OUTPUT + sudo getfacl /var/cache/apt/archives > $PERMISSIONS_FILE_PATH echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" - echo "::debug::Created /var/cache/apt/archives permissions backup to $apt_cache_dir_permissions_file" + echo "::debug::Created /var/cache/apt/archives permissions backup to $PERMISSIONS_FILE_PATH" shell: bash # Vital to be able to restore cache diff --git a/workflows/README.md 
b/workflows/README.md index 618ce64..0d16395 100644 --- a/workflows/README.md +++ b/workflows/README.md @@ -48,7 +48,7 @@ It is composed of nine steps: 5. *use_stylelint* 4. **Install packages** - This step install all missing packages from the dependency file in the directory specified by the *working_directory* input variable. 5. **Run linters** - This step uses [**node_linter**](../actions/node_linter/action.yml) action to run linters against the frontend source code. -6. **Check packages licenses** - This step uses [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker) to check the licenses used by the project requirements. +6. **Check packages licenses** - This step uses [**pilosus/action-pip-license-checker**](https://github.com/pilosus/action-pip-license-checker) to check the licenses used by the project requirements. 7. **Run CodeQL** - This step uses [**codeql**](../actions/codeql/action.yml) action to run CodeQL to discover vulnerabilities across the codebase. 8. **Run custom command** - This step is performed only if the input variable *custom_command* is not empty. The step simply run the bash command described in the previously mentioned input variable in the working directory specified by the *working_directory* input variable. 9. **Run jest tests** - This step runs Jest tests if the input variable *use_jest* is set to true. Finally, if *use_coverage* and *upload_coverage* are set to true, a coverage report is generated and uploaded. 
From b8607208902ed463bf103a424b3c673d9c861c77 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 14:54:36 +0100 Subject: [PATCH 11/15] a test --- .../actions/apt_requirements/restore_apt_cache/action.yml | 5 ++++- actions/apt_requirements/restore_apt_cache/action.yml | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml index 89a2f07..dcdcffe 100644 --- a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -30,7 +30,8 @@ runs: PERMISSIONS_FILE_PATH="/tmp/apt_cache_dir_permissions.facl" echo "apt_cache_dir_permissions_file=$PERMISSIONS_FILE_PATH" > $GITHUB_OUTPUT sudo getfacl /var/cache/apt/archives > $PERMISSIONS_FILE_PATH - echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + ARCHIVES_PERMISSIONS=$(ls -ld /var/cache/apt/archives) + echo "::debug::Original permissions given to /var/cache/apt/archives: $ARCHIVES_PERMISSIONS" echo "::debug::Created /var/cache/apt/archives permissions backup to $PERMISSIONS_FILE_PATH" shell: bash @@ -39,6 +40,7 @@ runs: - name: Add write permission for all to /var/cache/apt/archives run: | sudo chmod a+w /var/cache/apt/archives + echo "$ARCHIVES_PERMISSIONS" echo "::debug::New permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" shell: bash @@ -51,6 +53,7 @@ runs: - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | + echo "$(ls /tmp/)" permissions_file=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} sudo setfacl --restore="$permissions_file" echo "::debug::Restored original permissions to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" diff --git a/actions/apt_requirements/restore_apt_cache/action.yml 
b/actions/apt_requirements/restore_apt_cache/action.yml index 89a2f07..dcdcffe 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -30,7 +30,8 @@ runs: PERMISSIONS_FILE_PATH="/tmp/apt_cache_dir_permissions.facl" echo "apt_cache_dir_permissions_file=$PERMISSIONS_FILE_PATH" > $GITHUB_OUTPUT sudo getfacl /var/cache/apt/archives > $PERMISSIONS_FILE_PATH - echo "::debug::Original permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + ARCHIVES_PERMISSIONS=$(ls -ld /var/cache/apt/archives) + echo "::debug::Original permissions given to /var/cache/apt/archives: $ARCHIVES_PERMISSIONS" echo "::debug::Created /var/cache/apt/archives permissions backup to $PERMISSIONS_FILE_PATH" shell: bash @@ -39,6 +40,7 @@ runs: - name: Add write permission for all to /var/cache/apt/archives run: | sudo chmod a+w /var/cache/apt/archives + echo "$ARCHIVES_PERMISSIONS" echo "::debug::New permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" shell: bash @@ -51,6 +53,7 @@ runs: - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | + echo "$(ls /tmp/)" permissions_file=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} sudo setfacl --restore="$permissions_file" echo "::debug::Restored original permissions to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" From 48f4707177ebe6ae8dac41e8c24d1d0123eaf049 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 14:58:18 +0100 Subject: [PATCH 12/15] fix --- .../restore_apt_cache/action.yml | 20 +++++++++---------- .../restore_apt_cache/action.yml | 20 +++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml index dcdcffe..71fe9a9 100644 --- 
a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -29,7 +29,7 @@ runs: run: | PERMISSIONS_FILE_PATH="/tmp/apt_cache_dir_permissions.facl" echo "apt_cache_dir_permissions_file=$PERMISSIONS_FILE_PATH" > $GITHUB_OUTPUT - sudo getfacl /var/cache/apt/archives > $PERMISSIONS_FILE_PATH + sudo getfacl -p /var/cache/apt/archives > $PERMISSIONS_FILE_PATH ARCHIVES_PERMISSIONS=$(ls -ld /var/cache/apt/archives) echo "::debug::Original permissions given to /var/cache/apt/archives: $ARCHIVES_PERMISSIONS" echo "::debug::Created /var/cache/apt/archives permissions backup to $PERMISSIONS_FILE_PATH" @@ -40,8 +40,8 @@ runs: - name: Add write permission for all to /var/cache/apt/archives run: | sudo chmod a+w /var/cache/apt/archives - echo "$ARCHIVES_PERMISSIONS" - echo "::debug::New permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + ARCHIVES_NEW_PERMISSIONS=$(ls -ld /var/cache/apt/archives) + echo "::debug::New permissions given to /var/cache/apt/archives: $ARCHIVES_NEW_PERMISSIONS" shell: bash - name: Restore APT cache @@ -53,12 +53,12 @@ runs: - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | - echo "$(ls /tmp/)" - permissions_file=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} - sudo setfacl --restore="$permissions_file" - echo "::debug::Restored original permissions to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" - if [[ -f "$permissions_file" ]]; then - sudo rm "$permissions_file" - echo "::debug::Correctly removed $permissions_file permissions backup file" + PERMISSIONS_FILE_PATH=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} + sudo setfacl --restore="$PERMISSIONS_FILE_PATH" + ARCHIVES_RESTORED_PERMISSIONS=$(ls -ld /var/cache/apt/archives) + echo "::debug::Restored original permissions to /var/cache/apt/archives: 
$ARCHIVES_RESTORED_PERMISSIONS" + if [[ -f "$PERMISSIONS_FILE_PATH" ]]; then + sudo rm "$PERMISSIONS_FILE_PATH" + echo "::debug::Correctly removed $PERMISSIONS_FILE_PATH permissions backup file" fi shell: bash \ No newline at end of file diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml index dcdcffe..71fe9a9 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -29,7 +29,7 @@ runs: run: | PERMISSIONS_FILE_PATH="/tmp/apt_cache_dir_permissions.facl" echo "apt_cache_dir_permissions_file=$PERMISSIONS_FILE_PATH" > $GITHUB_OUTPUT - sudo getfacl /var/cache/apt/archives > $PERMISSIONS_FILE_PATH + sudo getfacl -p /var/cache/apt/archives > $PERMISSIONS_FILE_PATH ARCHIVES_PERMISSIONS=$(ls -ld /var/cache/apt/archives) echo "::debug::Original permissions given to /var/cache/apt/archives: $ARCHIVES_PERMISSIONS" echo "::debug::Created /var/cache/apt/archives permissions backup to $PERMISSIONS_FILE_PATH" @@ -40,8 +40,8 @@ runs: - name: Add write permission for all to /var/cache/apt/archives run: | sudo chmod a+w /var/cache/apt/archives - echo "$ARCHIVES_PERMISSIONS" - echo "::debug::New permissions given to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" + ARCHIVES_NEW_PERMISSIONS=$(ls -ld /var/cache/apt/archives) + echo "::debug::New permissions given to /var/cache/apt/archives: $ARCHIVES_NEW_PERMISSIONS" shell: bash - name: Restore APT cache @@ -53,12 +53,12 @@ runs: - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | - echo "$(ls /tmp/)" - permissions_file=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} - sudo setfacl --restore="$permissions_file" - echo "::debug::Restored original permissions to /var/cache/apt/archives: $(ls -l /var/cache/apt/archives)" - if [[ -f "$permissions_file" ]]; then - sudo rm "$permissions_file" - echo 
"::debug::Correctly removed $permissions_file permissions backup file" + PERMISSIONS_FILE_PATH=${{ steps.backup_apt_cache_dir_permissions.outputs.apt_cache_dir_permissions_file }} + sudo setfacl --restore="$PERMISSIONS_FILE_PATH" + ARCHIVES_RESTORED_PERMISSIONS=$(ls -ld /var/cache/apt/archives) + echo "::debug::Restored original permissions to /var/cache/apt/archives: $ARCHIVES_RESTORED_PERMISSIONS" + if [[ -f "$PERMISSIONS_FILE_PATH" ]]; then + sudo rm "$PERMISSIONS_FILE_PATH" + echo "::debug::Correctly removed $PERMISSIONS_FILE_PATH permissions backup file" fi shell: bash \ No newline at end of file From c31bd1abf76bfa0eb856fe7ac28caef86a76d861 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 15:09:30 +0100 Subject: [PATCH 13/15] fixed few problems with apt caching --- .../apt_requirements/restore_apt_cache/action.yml | 2 +- .github/workflows/README.md | 3 +-- .github/workflows/create_apt_cache.yaml | 11 ++--------- actions/apt_requirements/restore_apt_cache/action.yml | 2 +- workflows/README.md | 3 +-- workflows/create_apt_cache.yaml | 11 ++--------- 6 files changed, 8 insertions(+), 24 deletions(-) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml index 71fe9a9..b505461 100644 --- a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -49,7 +49,7 @@ runs: id: restore_apt_cache with: path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} + key: ${{ github.base_ref }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 0d16395..5bbea49 100644 --- a/.github/workflows/README.md +++ 
b/.github/workflows/README.md @@ -179,8 +179,7 @@ The workflow is composed of a single job: 1. **Create cache for APT dependencies** - This job, as described by its name, creates a cache for APT dependencies and stores it on GitHub. It is composed of four steps: 1. **Check out latest commit on current branch** - This step checks out the latest commit on the current branch of the repository. 2. **Install APT dependencies** - This step refreshes APT repositories and then install the project dependecies. This action is required to produce the APT cache that will be saved later. - 3. **Compute APT dependencies file SHA256 hash** - This step computes the SHA256 of the APT dependency file that will be used as cache key. - 4. **Save APT cache** - This step saves APT cache on GitHub. The GitHub's [**cache/save**](https://github.com/actions/cache/tree/main/save) action is used. + 3. **Save APT cache** - This step saves APT cache on GitHub. The [**save_apt_cache**](../actions/apt_requirements/save_apt_cache/README.md) action is used. 
## [Create Python cache](create_python_cache.yaml) diff --git a/.github/workflows/create_apt_cache.yaml b/.github/workflows/create_apt_cache.yaml index 4123d51..9bd5fd7 100644 --- a/.github/workflows/create_apt_cache.yaml +++ b/.github/workflows/create_apt_cache.yaml @@ -32,14 +32,7 @@ jobs: sudo apt-get update sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < .github/test/python_test/packages.txt) - - name: Compute APT dependencies file SHA256 hash - id: compute_apt_dependencies_file_sha256_hash - uses: ./.github/actions/misc/compute_files_hash - with: - file_paths: .github/test/python_test/packages.txt - - name: Save APT cache - uses: actions/cache/save@v4 + uses: ./.github/actions/apt_requirements/save_apt_cache with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_dependencies_file_sha256_hash.outputs.computed_hash }} + apt_requirements_file_path: .github/test/python_test/packages.txt diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml index 71fe9a9..b505461 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -49,7 +49,7 @@ runs: id: restore_apt_cache with: path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} + key: ${{ github.base_ref }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | diff --git a/workflows/README.md b/workflows/README.md index 0d16395..5bbea49 100644 --- a/workflows/README.md +++ b/workflows/README.md @@ -179,8 +179,7 @@ The workflow is composed of a single job: 1. **Create cache for APT dependencies** - This job, as described by its name, creates a cache for APT dependencies and stores it on GitHub. It is composed of four steps: 1. 
**Check out latest commit on current branch** - This step checks out the latest commit on the current branch of the repository. 2. **Install APT dependencies** - This step refreshes APT repositories and then install the project dependecies. This action is required to produce the APT cache that will be saved later. - 3. **Compute APT dependencies file SHA256 hash** - This step computes the SHA256 of the APT dependency file that will be used as cache key. - 4. **Save APT cache** - This step saves APT cache on GitHub. The GitHub's [**cache/save**](https://github.com/actions/cache/tree/main/save) action is used. + 3. **Save APT cache** - This step saves APT cache on GitHub. The [**save_apt_cache**](../actions/apt_requirements/save_apt_cache/README.md) action is used. ## [Create Python cache](create_python_cache.yaml) diff --git a/workflows/create_apt_cache.yaml b/workflows/create_apt_cache.yaml index 4123d51..9bd5fd7 100644 --- a/workflows/create_apt_cache.yaml +++ b/workflows/create_apt_cache.yaml @@ -32,14 +32,7 @@ jobs: sudo apt-get update sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < .github/test/python_test/packages.txt) - - name: Compute APT dependencies file SHA256 hash - id: compute_apt_dependencies_file_sha256_hash - uses: ./.github/actions/misc/compute_files_hash - with: - file_paths: .github/test/python_test/packages.txt - - name: Save APT cache - uses: actions/cache/save@v4 + uses: ./.github/actions/apt_requirements/save_apt_cache with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_dependencies_file_sha256_hash.outputs.computed_hash }} + apt_requirements_file_path: .github/test/python_test/packages.txt From 5a44ab7c52b9fa3a61df5065a0531addfabdd1d0 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 15:18:06 +0100 Subject: [PATCH 14/15] fixed another mistake --- .github/actions/apt_requirements/restore_apt_cache/action.yml | 2 +- 
actions/apt_requirements/restore_apt_cache/action.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/apt_requirements/restore_apt_cache/action.yml b/.github/actions/apt_requirements/restore_apt_cache/action.yml index b505461..282935b 100644 --- a/.github/actions/apt_requirements/restore_apt_cache/action.yml +++ b/.github/actions/apt_requirements/restore_apt_cache/action.yml @@ -49,7 +49,7 @@ runs: id: restore_apt_cache with: path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} + key: ${{ inputs.git_reference }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | diff --git a/actions/apt_requirements/restore_apt_cache/action.yml b/actions/apt_requirements/restore_apt_cache/action.yml index b505461..282935b 100644 --- a/actions/apt_requirements/restore_apt_cache/action.yml +++ b/actions/apt_requirements/restore_apt_cache/action.yml @@ -49,7 +49,7 @@ runs: id: restore_apt_cache with: path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} + key: ${{ inputs.git_reference }}-apt-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - name: Restore original permissions to /var/cache/apt/archives and delete backup run: | From 234e2531f1d1d998dc6622dd1e302019831f65e6 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Fri, 28 Mar 2025 15:39:48 +0100 Subject: [PATCH 15/15] removed leftover --- .github/actions/apt_requirements/action.yml | 60 --------------------- 1 file changed, 60 deletions(-) delete mode 100644 .github/actions/apt_requirements/action.yml diff --git a/.github/actions/apt_requirements/action.yml b/.github/actions/apt_requirements/action.yml deleted file mode 100644 index 70a3a98..0000000 --- 
a/.github/actions/apt_requirements/action.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Composite action install APT requirements -description: Composite action to install APT requirements -inputs: - requirements_file: - description: Requirements file - required: true - -# TODO scomporre questa action in due: -# - save apt cache -# - restore apt cache -runs: - using: "composite" - steps: - - name: Compute apt requirements file SHA256 hash - id: compute_apt_requirements_file_sha256_hash - uses: ./.github/actions/misc/compute_files_hash - with: - file_paths: ${{ inputs.requirements_file }} - - # Vital to be able to restore cache - # If write permission is not set, a permissions error will be raised - - name: Modification to /var/cache/apt/archives permissions - run: | - sudo chmod a+w /var/cache/apt/archives - shell: bash - - - uses: actions/cache/restore@v4 - id: restore_cache_from_parent_branch - with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - - - uses: actions/cache/restore@v4 - id: restore_cache_from_current_branch - if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' - with: - path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - - - name: Refresh repositories - if: > - steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && - steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' - run: | - sudo apt-get update - shell: bash - - - name: Install requirements - run: | - sudo apt-get install -y --no-install-recommends $(tr '\n' ' ' < ${{ inputs.requirements_file }}) - shell: bash - - - uses: actions/cache/save@v4 - id: cache_apt_requirements_for_current_branch - if: > - steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && - steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' - with: - path: 
/var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} \ No newline at end of file