commit 59e4c1058b54e63ec778dfcdb844c200b06344a5
parent df1e5f62c0e2e407d43dbe87939ce846b2417e02
Author: arjoonn <arjoonn@noreply.localhost>
Date: Wed, 25 Feb 2026 07:00:52 +0000
CLI Clean (!3)
Reviewed-on: https://gitea.midpathsoftware.com/midpath/jayporeci/pulls/3
Diffstat:
85 files changed, 1648 insertions(+), 5175 deletions(-)
diff --git a/.jci/run.sh b/.jci/run.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# CI script for git-jci
+# This runs in .jci/<commit>/ directory
+# Environment variables available:
+# JCI_COMMIT - Full commit hash
+# JCI_REPO_ROOT - Repository root path
+# JCI_OUTPUT_DIR - Output directory (where artifacts should go)
+
+set -e
+
+echo "=== JCI CI Pipeline ==="
+echo "Commit: ${JCI_COMMIT:0:12}"
+echo ""
+
+cd "$JCI_REPO_ROOT"
+
+echo "Running tests..."
+go test ./...
+echo ""
+
+echo "Building static binary (CGO_ENABLED=0)..."
+CGO_ENABLED=0 go build -ldflags='-s -w -extldflags "-static"' -o "$JCI_OUTPUT_DIR/git-jci" ./cmd/git-jci
+
+# Verify it's static
+echo ""
+echo "Binary info:"
+file "$JCI_OUTPUT_DIR/git-jci"
+ls -lh "$JCI_OUTPUT_DIR/git-jci"
+
+echo ""
+echo "All steps completed successfully!"
+echo ""
+echo "=== Installation ==="
+echo "Download and install with:"
+echo " curl -fsSL \$(git jci web --url)/git-jci -o /tmp/git-jci && sudo install /tmp/git-jci /usr/local/bin/"
diff --git a/AGENTS.md b/AGENTS.md
@@ -0,0 +1 @@
+Make sure to use the cli_clean branch for your work.
diff --git a/Dockerfile b/Dockerfile
@@ -1,25 +0,0 @@
-from python:3.11 as jcienv
-workdir /app
-run python3 -m pip install --upgrade pip
-run python3 -m pip install poetry
-add pyproject.toml .
-add poetry.lock .
-run poetry config virtualenvs.create false
-run poetry install
-env PYTHONPATH=/jaypore_ci/run/:/app
-env PATH=/jaypore_ci/run/:/app:$PATH
-env EDITOR=vim
-add https://github.com/mozilla/sops/releases/download/v3.7.3/sops-v3.7.3.linux /bin/sops
-add https://github.com/FiloSottile/age/releases/download/v1.0.0/age-v1.0.0-linux-amd64.tar.gz ./age.tar.gz
-run tar xf ./age.tar.gz && mv ./age/age /bin && mv ./age/age-keygen /bin && rm -rf ./age
-run apt update && apt install -y wget curl zip vim
-run chmod u+x /bin/sops /bin/age /bin/age-keygen
-
-from jcienv as jci
-add jaypore_ci/ /app/jaypore_ci
-run poetry build
-run ls -alR dist
-run python3 -m pip install dist/jaypore_ci-*.whl
-run rm -rf jaypore_ci dist
-run ls -alR .
-workdir /jaypore_ci/run/
diff --git a/LICENSE b/LICENSE
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 Arjoonn Sharma
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/README.md b/README.md
@@ -1,14 +1,127 @@
-#  Jaypore CI
+# git-jci
-Documentation is at : https://www.jayporeci.in
+A local-first CI system that stores results in git's custom refs.
-## If you are reading this on GitHub
+## Installation
-- Github is the **mirror** for this codebase, actual development is on a personal gitea instance.
-- Please use [Github discussions](https://github.com/theSage21/jaypore_ci/discussions) for describing problems / asking for help / adding ideas.
-- Jaypore CI is open source, not openly developed yet so instead of submitting PRs, please fork the project and start a discussion.
+### From source
-## Usage
+```bash
+go build -o git-jci ./cmd/git-jci
+sudo mv git-jci /usr/local/bin/
+```
-- Install : `curl https://www.jayporeci.in/setup.sh > setup.sh && bash setup.sh -y`
-- Trigger : `git push origin`
+### From CI artifacts
+
+If CI has run, you can download the pre-built static binary:
+
+```bash
+# One-liner: download and install from running JCI web server
+curl -fsSL http://localhost:8000/jci/$(git rev-parse HEAD)/git-jci -o /tmp/git-jci && sudo install /tmp/git-jci /usr/local/bin/
+
+# Or from a specific commit
+curl -fsSL http://localhost:8000/jci/<commit>/git-jci -o /tmp/git-jci && sudo install /tmp/git-jci /usr/local/bin/
+```
+
+The binary is fully static (no shared-library dependencies) and works on any Linux system with a matching CPU architecture.
+
+Once installed, git will automatically find it as a subcommand:
+
+```bash
+git jci run
+```
+
+## Setup
+
+Create a `.jci/run.sh` script in your repository:
+
+```bash
+mkdir -p .jci
+cat > .jci/run.sh << 'EOF'
+#!/bin/bash
+set -e
+
+echo "Running tests..."
+cd "$JCI_REPO_ROOT" && go test ./...
+
+echo "Building..."
+cd "$JCI_REPO_ROOT" && go build -o "$JCI_OUTPUT_DIR/binary" ./cmd/...
+
+echo "Done!"
+EOF
+chmod +x .jci/run.sh
+```
+
+### Environment Variables
+
+Your `run.sh` script has access to:
+
+| Variable | Description |
+|----------|-------------|
+| `JCI_COMMIT` | Full commit hash |
+| `JCI_REPO_ROOT` | Repository root path |
+| `JCI_OUTPUT_DIR` | Output directory for artifacts |
+
+The script runs with `cwd` set to `JCI_OUTPUT_DIR`. Any files created there become CI artifacts.
+
+## Commands
+
+### `git jci run`
+
+Run CI for the current commit:
+
+```bash
+git commit -m "My changes"
+git jci run
+```
+
+This will:
+1. Execute `.jci/run.sh`
+2. Capture stdout/stderr to `run.output.txt`
+3. Store all output files (artifacts) in `refs/jci/<commit>`
+4. Generate an `index.html` with results
+
+### `git jci web [port]`
+
+Start a web server to view CI results. Default port is 8000.
+
+```bash
+git jci web
+git jci web 3000
+```
+
+### `git jci push [remote]`
+
+Push CI results to a remote. Default remote is `origin`.
+
+```bash
+git jci push
+git jci push upstream
+```
+
+### `git jci pull [remote]`
+
+Fetch CI results from a remote.
+
+```bash
+git jci pull
+```
+
+### `git jci prune`
+
+Remove CI results for commits that no longer exist in the repository.
+
+```bash
+git jci prune
+```
+
+## How it works
+
+CI results are stored as git objects (a commit wrapping a tree of artifacts) under the `refs/jci/` namespace.
+This keeps them separate from your regular branches and tags, but still
+part of the git repository.
+
+- Results are not checked out to the working directory
+- They can be pushed/pulled like any other refs
+- They are garbage collected when the original commit is gone (via `prune`)
+- Each commit's CI output is stored as a separate commit object
diff --git a/cicd/Dockerfile b/cicd/Dockerfile
@@ -1,3 +0,0 @@
-ARG JAYPORECI_VERSION
-FROM arjoonn/jci:$JAYPORECI_VERSION
-COPY ../ /jaypore_ci/repo/
diff --git a/cicd/build_and_publish_docs.sh b/cicd/build_and_publish_docs.sh
@@ -1,54 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-build() {
- echo "Cleaning docs build"
- touch docs/build
- rm -rf docs/build && mkdir -p docs/build
- ls -al docs
- echo "Building docs"
- sphinx-apidoc -o docs/source/reference ./jaypore_ci
- (python3 cicd/render_changelog.py >> docs/source/index.rst)
- sphinx-build docs/source/ docs/build
- sphinx-build docs/source/ docs/build -b coverage
-
- # Create pre-push for repo
- PREPUSH=docs/build/pre-push.sh
- cp cicd/pre-push.sh $PREPUSH
- sed -i '$ d' $PREPUSH
- # add expected version of Jci
- echo "" >> $PREPUSH
- echo "# Change the version in the next line to whatever you want if you" >> $PREPUSH
- echo "# would like to upgrade to a different version of JayporeCI." >> $PREPUSH
- echo -n "EXPECTED_JAYPORECI_" >> $PREPUSH
- grep version pyproject.toml | python3 -c 'print(input().upper().replace(" ", "").replace("\"", ""))' >> $PREPUSH
-
- echo "" >> $PREPUSH
- echo '("$@")' >> $PREPUSH
-
- # Copy other files
- cp cicd/Dockerfile docs/build
- cp setup.sh docs/build
- cp -r htmlcov /jaypore_ci/run/docs/build/
- cp -r secrets/bin docs/build
- wget -O docs/build/sops https://github.com/mozilla/sops/releases/download/v3.7.3/sops-v3.7.3.linux
- wget -O ./age.tar.gz https://github.com/FiloSottile/age/releases/download/v1.0.0/age-v1.0.0-linux-amd64.tar.gz
- tar xf ./age.tar.gz && mv ./age/age docs/build/bin && mv ./age/age-keygen docs/build/bin && rm -rf ./age
-
- # Create docs bundle
- (cd docs/build && zip -r ../../website.zip ./)
-}
-
-publish() {
- echo "Publishing docs"
- curl -H "Content-Type: application/zip" \
- -H "Authorization: Bearer $NETLIFY_TOKEN" \
- --data-binary "@website.zip" \
- https://api.netlify.com/api/v1/sites/$NETLIFY_SITEID/deploys
-}
-
-(build)
-(publish)
diff --git a/cicd/build_and_push_docker.sh b/cicd/build_and_push_docker.sh
@@ -1,13 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-docker login -u arjoonn -p=$DOCKER_PWD
-docker build --target $1 -t $1:latest .
-docker tag $1:latest arjoonn/$1:latest
-docker push arjoonn/$1:latest
-VERSION=$(grep version pyproject.toml | python3 -c 'print(input().split("=")[1].upper().replace(" ", "").replace("\"", ""))')
-docker tag $1:latest arjoonn/$1:$VERSION
-docker push arjoonn/$1:$VERSION
diff --git a/cicd/build_and_push_pypi.sh b/cicd/build_and_push_pypi.sh
@@ -1,13 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-main() {
- echo "v$(poetry version | awk '{print $2}')" > "/jaypore_ci/run/PublishPypi.txt"
- poetry build
- poetry config pypi-token.pypi $PYPI_TOKEN
- poetry publish
-}
-(main)
diff --git a/cicd/cicd.py b/cicd/cicd.py
@@ -1,52 +0,0 @@
-from jaypore_ci import jci
-from typing import NamedTuple
-
-
-class Should(NamedTuple):
- release: bool = False
- lint: bool = True
-
-
-def parse_commit(repo):
- """
- Decide what all the commit is asking us to do.
- """
- config = {}
- for line in repo.commit_message.lower().split("\n"):
- line = line.strip().replace(" ", "")
- if "jci:" in line:
- _, key = line.split("jci:")
- config[key] = True
- return Should(**config)
-
-
-with jci.Pipeline() as p:
- should = parse_commit(p.repo)
- jcienv = f"jcienv:{p.repo.sha}"
- with p.stage("build_and_test"):
- p.job("JciEnv", f"docker build --target jcienv -t jcienv:{p.repo.sha} .")
- p.job(
- "Jci",
- f"docker build --target jci -t jci:{p.repo.sha} .",
- depends_on=["JciEnv"],
- )
- kwargs = dict(image=jcienv, depends_on=["JciEnv"])
- p.job("black", "python3 -m black --check .", **kwargs)
- p.job("pylint", "python3 -m pylint jaypore_ci/ tests/", **kwargs)
- p.job("pytest", "bash cicd/run_tests.sh", image=jcienv, depends_on=["JciEnv"])
- p.job(
- "install_test",
- "bash cicd/test_installation.sh",
- image=jcienv,
- depends_on=["JciEnv"],
- )
-
- if should.release:
- with p.stage("Publish", image=jcienv):
- p.job("DockerHubJcienv", "bash cicd/build_and_push_docker.sh jcienv")
- p.job("DockerHubJci", "bash cicd/build_and_push_docker.sh jci")
- p.job(
- "PublishDocs",
- f"bash cicd/build_and_publish_docs.sh {p.remote.branch}",
- )
- p.job("PublishPypi", "bash cicd/build_and_push_pypi.sh")
diff --git a/cicd/install_docker.sh b/cicd/install_docker.sh
@@ -1,23 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-main(){
- apt-get update
- apt-get install -y \
- ca-certificates \
- curl \
- gnupg \
- lsb-release
- mkdir -m 0755 -p /etc/apt/keyrings
- curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg
- echo \
- "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian \
- $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
- apt-get update
- apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
-
-}
-(main)
diff --git a/cicd/pre-push.sh b/cicd/pre-push.sh
@@ -1,67 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-
-run() {
- if [ -z ${ENV+x} ]; then
- echo "ENV : ? -> SKIP sourcing from secrets."
- else
- echo "ENV : '$ENV' -> Sourcing from secrets"
- echo "---"
- source /jaypore_ci/repo/secrets/bin/set_env.sh $ENV
- fi
- cp -r /jaypore_ci/repo/. /jaypore_ci/run
- cd /jaypore_ci/run/
- git clean -fdx
- # Change the name of the file if this is not cicd.py
- echo "---- Container ID:"
- cat /jaypore_ci/cidfiles/$SHA
- echo
- echo "---- ======="
- python /jaypore_ci/run/$JAYPORE_CODE_DIR/cicd.py
-}
-
-
-hook() {
- SHA=$(git rev-parse HEAD)
- REPO_ROOT=$(git rev-parse --show-toplevel)
- JAYPORE_CODE_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
- JAYPORE_CODE_DIR=$(basename $JAYPORE_CODE_DIR)
- # We will mount the current dir into /jaypore_ci/repo
- # Then we will copy things over to /jaypore_ci/run
- # Then we will run git clean to remove anything that is not in git
- # Then we call the actual cicd code
- #
- # We also pass docker.sock and the docker executable to the run so that
- # jaypore_ci can create docker containers
- mkdir -p /tmp/jayporeci__cidfiles &> /dev/null
- echo '----------------------------------------------'
- echo "Jaypore CI"
- echo "Building image : "
- docker build \
- --build-arg JAYPORECI_VERSION=$EXPECTED_JAYPORECI_VERSION \
- -t im_jayporeci__pipe__$SHA \
- -f $REPO_ROOT/$JAYPORE_CODE_DIR/Dockerfile \
- $REPO_ROOT
- echo "Running container : "
- docker run \
- -d \
- --name jayporeci__pipe__$SHA \
- -e JAYPORE_CODE_DIR=$JAYPORE_CODE_DIR \
- -e SHA=$SHA \
- -v /var/run/docker.sock:/var/run/docker.sock \
- -v /tmp/jayporeci__src__$SHA:/jaypore_ci/run \
- -v /tmp/jayporeci__cidfiles:/jaypore_ci/cidfiles:ro \
- --cidfile /tmp/jayporeci__cidfiles/$SHA \
- --workdir /jaypore_ci/run \
- im_jayporeci__pipe__$SHA \
- bash -c "ENV=$ENV bash /jaypore_ci/repo/$JAYPORE_CODE_DIR/pre-push.sh run"
- echo '----------------------------------------------'
-}
-EXPECTED_JAYPORECI_VERSION=latest
-
-# --------- runner
-("$@")
diff --git a/cicd/render_changelog.py b/cicd/render_changelog.py
@@ -1,9 +0,0 @@
-from jaypore_ci.changelog import version_map
-
-for version in sorted(version_map.keys(), reverse=True):
- print(version)
- print("-" * len(str(version)))
- print("")
- for line in version_map[version]["changes"]:
- print("- ", line)
- print("")
diff --git a/cicd/run_tests.sh b/cicd/run_tests.sh
@@ -1,20 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-
-main() {
- python -m coverage run --branch --source=. -m pytest -l --full-trace -vvvv --hypothesis-verbosity=verbose
- coverage html
- coverage report
- echo "Cov: $(coverage report --format=total)%" > "/jaypore_ci/run/pytest.txt"
- # Mark info in jci docs
- # .. |Product| replace:: SoftTech Analyzer
- echo -e "\n.. |coverage| replace:: $(coverage report --format=total)%\n" >> "/jaypore_ci/run/docs/source/index.rst"
- echo -e "\n.. |package_version| replace:: $(poetry version | awk '{print $2}')\n" >> "/jaypore_ci/run/docs/source/index.rst"
-}
-
-(main)
-
diff --git a/cicd/test_installation.sh b/cicd/test_installation.sh
@@ -1,28 +0,0 @@
-#! /bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-
-main() {
- mkdir /fake_py_repo
- JCI_ROOT=$PWD
- cd /fake_py_repo
- echo "
-print(1
-+
-1)" > code.py
- git config --global user.email "fake@email.com"
- git config --global user.name "Fake User"
- git config --global init.defaultBranch develop
- git init
- git add -Av
- git commit -m 'init'
- export RUNNING_IN_CI=yes
- bash $JCI_ROOT/setup.sh -y
- git add -Av
- git commit -m 'installed Jci'
-}
-
-(main)
diff --git a/cmd/git-jci/main.go b/cmd/git-jci/main.go
@@ -0,0 +1,59 @@
+package main
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/exedev/git-jci/internal/jci"
+)
+
+func main() {
+ if len(os.Args) < 2 {
+ printUsage()
+ os.Exit(1)
+ }
+
+ cmd := os.Args[1]
+ args := os.Args[2:]
+
+ var err error
+ switch cmd {
+ case "run":
+ err = jci.Run(args)
+ case "web":
+ err = jci.Web(args)
+ case "push":
+ err = jci.Push(args)
+ case "pull":
+ err = jci.Pull(args)
+ case "prune":
+ err = jci.Prune(args)
+ case "help", "-h", "--help":
+ printUsage()
+ return
+ default:
+ fmt.Fprintf(os.Stderr, "unknown command: %s\n", cmd)
+ printUsage()
+ os.Exit(1)
+ }
+
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "error: %v\n", err)
+ os.Exit(1)
+ }
+}
+
+func printUsage() {
+ fmt.Println(`git-jci - Local-first CI system stored in git
+
+Usage: git jci <command> [options]
+
+Commands:
+ run Run CI for the current commit and store results
+ web Start a web server to view CI results
+ push Push CI results to remote
+ pull Pull CI results from remote
+ prune Remove old CI results
+
+CI results are stored in refs/jci/<commit> namespace.`)
+}
diff --git a/docs/.gitignore b/docs/.gitignore
@@ -1,2 +0,0 @@
-build/
-reference/
diff --git a/docs/Makefile b/docs/Makefile
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = source
-BUILDDIR = build
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/make.bat b/docs/make.bat
@@ -1,35 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=source
-set BUILDDIR=build
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.https://www.sphinx-doc.org/
- exit /b 1
-)
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:end
-popd
diff --git a/docs/source/_static/logo.ico b/docs/source/_static/logo.ico
Binary files differ.
diff --git a/docs/source/_static/logo.png b/docs/source/_static/logo.png
Binary files differ.
diff --git a/docs/source/_static/logo80.png b/docs/source/_static/logo80.png
Binary files differ.
diff --git a/docs/source/conf.py b/docs/source/conf.py
@@ -1,57 +0,0 @@
-# Configuration file for the Sphinx documentation builder.
-#
-# For the full list of built-in configuration values, see the documentation:
-# https://www.sphinx-doc.org/en/master/usage/configuration.html
-
-# -- Project information -----------------------------------------------------
-# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
-
-project = "Jaypore CI"
-copyright = "2022, Arjoonn Sharma"
-author = "Arjoonn Sharma"
-
-# -- General configuration ---------------------------------------------------
-# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
-
-extensions = [
- "sphinx.ext.autodoc",
- "sphinx.ext.autosummary",
- "sphinx_rtd_theme",
- "sphinxcontrib.mermaid",
- "sphinx.ext.coverage",
- "sphinx.ext.viewcode",
- "sphinx.ext.todo",
-]
-
-templates_path = ["_templates"]
-exclude_patterns = []
-
-
-# -- Options for HTML output -------------------------------------------------
-# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
-
-html_sidebars = {
- "**": [
- "about.html",
- "navigation.html",
- "relations.html",
- "searchbox.html",
- "donate.html",
- ]
-}
-html_favicon = "_static/logo.ico"
-# html_theme = "alabaster"
-html_theme = "sphinx_rtd_theme"
-html_static_path = ["_static"]
-html_theme_options = {
- "display_version": True,
- "prev_next_buttons_location": "bottom",
- # Toc options
- "collapse_navigation": True,
- "sticky_navigation": True,
- "navigation_depth": 4,
- # "includehidden": True,
- # "titles_only": False,
-}
-master_doc = "contents"
-todo_include_todos = True
diff --git a/docs/source/contents.rst b/docs/source/contents.rst
@@ -1,3 +0,0 @@
-.. toctree::
-
- index
diff --git a/docs/source/examples/build_and_publish_docker_images.py b/docs/source/examples/build_and_publish_docker_images.py
@@ -1,5 +0,0 @@
-from jaypore_ci import jci
-
-with jci.Pipeline() as p:
- p.job("Docker", f"docker build -t myimage .")
- p.job("PyTest", "python3 -m pytest tests/", image="myimage", depends_on=["Docker"])
diff --git a/docs/source/examples/complex_dependencies.py b/docs/source/examples/complex_dependencies.py
@@ -1,16 +0,0 @@
-from jaypore_ci import jci
-
-with jci.Pipeline() as p:
-
- with p.stage("build"):
- p.job("DockDev", f"docker build --target DevEnv -t {p.repo.sha}_dev .")
-
- with p.stage("checking", image=f"{p.repo.sha}_dev"):
- p.job("Integration", "run test.sh integration")
- p.job("Unit", "run test.sh unit")
- p.job("Linting", "run lint.sh")
- p.job(
- "Fuzz testing",
- "bash test.sh fuzz",
- depends_on=["Integration", "Unit"],
- )
diff --git a/docs/source/examples/config_testing.py b/docs/source/examples/config_testing.py
@@ -1,14 +0,0 @@
-from jaypore_ci import jci
-
-pipeline = jci.Pipeline(poll_interval=0)
-with pipeline as p:
- for name in "pq":
- p.job(name, name)
- p.job("x", "x")
- p.job("y", "y", depends_on=["x"])
- p.job("z", "z", depends_on=["y"])
- for name in "ab":
- p.job(name, name)
-
-order = pipeline.executor.get_execution_order()
-# assert order["x"] < order["y"] < order["z"]
diff --git a/docs/source/examples/custom_services.py b/docs/source/examples/custom_services.py
@@ -1,18 +0,0 @@
-from jaypore_ci import jci
-
-# Services immediately return with a PASSED status
-# If they exit with a Non ZERO code they are marked as FAILED, otherwise
-# they are assumed to be PASSED
-with jci.Pipeline() as p:
-
- # Since we define all jobs in this section as `is_service=True`, they will
- # keep running for as long as the pipeline runs.
- with p.stage("Services", is_service=True):
- p.job("Mysql", None, image="mysql")
- p.job("Redis", None, image="redis")
- p.job("Api", "python3 -m src.run_api", image="python:3.11")
-
- with p.stage("Testing"):
- p.job("Unit", "pytest -m unit_tests tests")
- p.job("Integration", "pytest -m integration_tests tests")
- p.job("Regression", "pytest -m regression_tests tests")
diff --git a/docs/source/examples/extra_hosts.py b/docs/source/examples/extra_hosts.py
@@ -1,17 +0,0 @@
-from jaypore_ci import jci
-
-with jci.Pipeline() as p:
- p.job(
- "Pytest",
- "pytest",
- executor_kwargs={
- "extra_hosts": {
- # Access machines behind VPNs
- "machine.behind.vpn": "100.64.0.12",
- # Redirect localhost addresses to the docker gateway
- "dozzle.localhost": "172.0.0.1",
- # Replace production APIs with locally mocked APIs
- "api.myservice.com": "127.0.0.1",
- }
- },
- )
diff --git a/docs/source/examples/github_remote.py b/docs/source/examples/github_remote.py
@@ -1,8 +0,0 @@
-from jaypore_ci import jci, repos, remotes
-
-repo = repos.Git.from_env()
-# Specify JAYPORE_GITHUB_TOKEN in your secrets file
-remote = remotes.Github.from_env(repo=repo)
-
-with jci.Pipeline(repo=repo, remote=remote) as p:
- p.job("Pytest", "pytest ")
diff --git a/docs/source/examples/job_matrix.py b/docs/source/examples/job_matrix.py
@@ -1,15 +0,0 @@
-from jaypore_ci import jci
-
-with jci.Pipeline() as p:
- # This will have 18 jobs
- # one for each possible combination of BROWSER, SCREENSIZE, ONLINE
- for env in p.env_matrix(
- BROWSER=["firefox", "chromium", "webkit"],
- SCREENSIZE=["phone", "laptop", "extended"],
- ONLINE=["online", "offline"],
- ):
- p.job(
- f"Test: {env}",
- "pytest --browser=$BROWSER --device=$SCREENSIZE",
- env=env,
- )
diff --git a/docs/source/examples/jobs_based_on_commit_messages.py b/docs/source/examples/jobs_based_on_commit_messages.py
@@ -1,8 +0,0 @@
-from jaypore_ci import jci
-
-with jci.Pipeline() as p:
- p.job("build", "bash cicd/build.sh")
-
- # The job only gets defined when the commit message contains 'jci:release'
- if "jci:release" in p.repo.commit_message:
- p.job("release", "bash cicd/release.sh", depends_on=["build"])
diff --git a/docs/source/examples/optional_jobs.py b/docs/source/examples/optional_jobs.py
@@ -1,20 +0,0 @@
-from jaypore_ci import jci
-
-
-with jci.Pipeline() as p:
- p.job("testing", "bash cicd/lint_test_n_build.sh")
- # This job will only be defined when the branch is main. Otherwise it will
- # not be a part of the pipeline
- if p.repo.branch == "main":
- p.job(
- "publish",
- "bash cicd/publish_release.sh",
- depends_on=["testing"],
- )
- # The following job will only be run when documentation changes.
- if any(path.startswith("docs") for path in p.repo.files_changed("develop")):
- p.job(
- "build_docs",
- "bash cicd/build_docs.sh",
- depends_on=["testing"],
- )
diff --git a/docs/source/examples/report_via_email.py b/docs/source/examples/report_via_email.py
@@ -1,8 +0,0 @@
-from jaypore_ci import jci, executors, remotes, repos
-
-git = repos.Git.from_env()
-email = remotes.Email.from_env(repo=git)
-
-# The report for this pipeline will go via email.
-with jci.Pipeline(repo=git, remote=email) as p:
- p.job("hello", "bash -c 'echo hello'")
diff --git a/docs/source/index.rst b/docs/source/index.rst
@@ -1,433 +0,0 @@
-.. Jaypore CI documentation master file, created by
- sphinx-quickstart on Thu Dec 22 13:34:40 2022.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
-
-TLDR
-====
-
-|logo|
-
-- **Jaypore CI** is a *small*, *very flexible*, and *powerful* system for automation within software projects.
-- Latest version: |package_version|
-- `Test coverage </htmlcov>`_ : |coverage|
-- `PyPi <https://pypi.org/project/jaypore-ci/>`_
-- `Docker Hub <https://hub.docker.com/r/arjoonn/jci>`_
-- `Github Mirror <https://github.com/theSage21/jaypore_ci>`_
-
-------------
-
-- Configure pipelines in Python
-- Jobs are run using `Docker <https://www.docker.com/>`_; on your laptop and on cloud IF needed.
-- Send status reports anywhere, or nowhere at all. Email, commit to git, Gitea
- PR, Github PR, or write your own class and send it where you want.
-
-
-Getting Started
-===============
-
-Installation
-------------
-
-You can install Jaypore CI using a bash script. The script only makes changes in your
-repository so if you want you can do the installation manually as well.
-
-.. code-block:: console
-
- $ cd ~/myrepository
- $ curl https://www.jayporeci.in/setup.sh > setup.sh
- $ bash setup.sh -y
-
-
-**For a manual install** you can do the following. The names are convention,
-you can call your folders/files anything but you'll need to make sure they
-match everywhere.
-
-1. Create a directory called *cicd* in the root of your repo.
-2. Create a file *cicd/pre-push.sh*
-3. Create a file *cicd/cicd.py*
-4. Update your repo's pre-push git hook so that it runs the *cicd/pre-push.sh* file when you push.
- 1. Git hook should call `cicd/pre-push.sh`
- 2. After setting environment variables `cicd/pre-push.sh` calls
- `cicd/cicd.py` inside a docker container having JayporeCI installed.
- You can use `arjoonn/jci` if you don't have anything else ready.
- 3. `cicd/cicd.py` will run your jobs within other docker containers.
-
-
-Your entire config is inside `cicd/cicd.py`. Edit it to whatever you like! A basic config would look like this:
-
-.. code-block:: python
-
- from jaypore_ci import jci
-
- with jci.Pipeline(image='mydocker/image') as p:
- p.job("Black", "black --check .")
- p.job("Pylint", "pylint mycode/ tests/")
- p.job("PyTest", "pytest tests/")
-
-This would produce a CI report like::
-
- ╔ 🟢 : JayporeCI [sha edcb193bae]
- ┏━ Pipeline
- ┃
- ┃ 🟢 : Black [ffcda0a9] 0: 3
- ┃ 🟢 : Pylint [2417ad58] 0: 9
- ┃ 🟢 : PyTest [28d4985f] 0:15 [Cov: 65% ]
- ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
-
-- **edcb193bae** is the SHA that the report is for.
-- **Pipeline** is the default pipeline stage.
-- 🟢 indicates that the job has passed
-- **Black**, **Pylint**, and **PyTest** are the job names.
-- **[ffcda0a9]** is the docker container ID for that job.
-- **1: 3** is the time taken by the job.
-- **[Cov: 65% ]** is custom reporting done by the job.
- - Any job can create a file **/jaypore_ci/run/<job name>.txt** and the first 5 characters from that file will be displayed in the report.
- - Although this is used for coverage reports you could potentially use this for anything you want.
- - You could report error codes here to indicate WHY a job failed.
- - Report information about artifacts created like package publish versions.
-
-
-To see the pipelines on your machine you can use a `Dozzle
-<https://dozzle.dev/>`_ container on your localhost to explore CI jobs.
-
-If you don't want to do this it's also possible to simply use `docker logs
-<container ID>` to explore jobs.
-
-
-Concepts
---------
-
-Pipeline config
-***************
-
-.. mermaid::
-
- sequenceDiagram
- autonumber
- loop Pipeline execution
- Pipeline ->> Executor: docker run [n jobs]
- Executor -->> Pipeline: docker inspect [k jobs]
- Pipeline ->> Reporter: Pipeline status
- Reporter -->> Pipeline: Rendered report
- Pipeline ->> Remote: Publish report
- Remote -->> Pipeline: ok
- end
-
-1. A pipeline is defined inside a python file that imports and uses **jaypore_ci**.
- - It can also import other libraries / configs. Do whatever your usecase needs.
-2. A config starts with creating a :class:`~jaypore_ci.jci.Pipeline` instance. Everything happens inside this context.
- - A pipeline has to have one implementation of a
- :class:`~jaypore_ci.interfaces.Remote`,
- :class:`~jaypore_ci.interfaces.Reporter`,
- :class:`~jaypore_ci.interfaces.Executor`, and
- :class:`~jaypore_ci.interfaces.Repo` specified.
- - If you do not specify them then the defaults are
- :class:`~jaypore_ci.remotes.gitea.Gitea`,
- :class:`~jaypore_ci.reporters.text.Text`,
- :class:`~jaypore_ci.executors.docker.Docker`, and
- :class:`~jaypore_ci.repos.git.Git`.
- - You can specify ANY other keyword arguments to the pipeline and they will
- be applied to jobs in that pipeline as a default. This allows you to keep
- your code DRY. For example, we can specify **image='some/docker:image'**
- and this will be used for all jobs in the pipeline.
-3. Parts of a pipeline
- 1. :class:`~jaypore_ci.interfaces.Repo` holds information about the project.
- - You can use this to get information about things like `sha` and `branch`.
- - It can also tell you which files have changed using
- :meth:`~jaypore_ci.interfaces.Repo.files_changed`.
- - Currently only :class:`~jaypore_ci.repos.git.Git` is supported.
- 2. :class:`~jaypore_ci.interfaces.Executor` Is used to run the job. Perhaps in
- the future we might have shell / VMs.
- 3. :class:`~jaypore_ci.interfaces.Reporter` Given the status of the pipeline
- the reporter is responsible for creating a text output that can be read by
- humans.
- Along with :class:`~jaypore_ci.reporters.text.Text` , we also have
- the :class:`~jaypore_ci.reporters.markdown.Markdown` reporter that uses
- Mermaid graphs to show you pipeline dependencies.
- 4. :class:`~jaypore_ci.interfaces.Remote` is where the report is published to. Currently we have:
- - :class:`~jaypore_ci.remotes.git.GitRemote` which can store the pipeline status
- in git itself. You can then push the status to your github and share it
- with others. This works similar to git-bug.
- - :class:`~jaypore_ci.remotes.gitea.Gitea` can open a PR and publish pipeline status as the PR description on Gitea.
- - :class:`~jaypore_ci.remotes.github.Github` can open a PR and publish pipeline status as the PR description on Github.
- - :class:`~jaypore_ci.remotes.email.Email` can email you the pipeline status.
-4. Each pipeline can declare multiple :meth:`~jaypore_ci.jci.Pipeline.stage` sections.
- - Stage names have to be unique. They cannot conflict with job names and other stage names.
- - Stages are executed in the order in which they are declared in the config.
- - The catch all stage is called **Pipeline**. Any job defined outside a stage belongs to this stage.
- - Any extra keyword arguments specified while creating the stage are
- passed to jobs. These arguments override whatever is specified at the
- Pipeline level.
-5. Finally, any number of :meth:`~jaypore_ci.jci.Pipeline.job` definitions can be made.
- - Jobs declared inside a stage belong to that stage.
- - Job names have to be unique. They cannot clash with stage names and other job names.
- - Jobs are run in parallel **UNLESS** they specify
- **depends_on=["other_job"]**, in which case the job runs after
- **other_job** has passed.
- - Jobs inherit keyword arguments from Pipelines, then stages, then whatever
- is specified at the job level.
-
-
-.. mermaid::
-
- flowchart TD
- A(REPO- holds information about the project) --> B(PIPELINE)
- %% flowchart LR
- B <-->C(EXECUTOR- runs the job)
- B --->D(REPORTER- responsible for creating output in Text/Markdown Reporter)
- D -->E(REMOTE- report is publish in Gitmore/gitea/github/email)
-
-
-Secrets and environment variables
-*********************************
-
-1. JayporeCI uses `SOPS <https://github.com/mozilla/sops>`_ to manage environment variables and secrets.
- - We add `secrets/<env_name>.enc` to store secrets.
- - We add `secrets/<env_name>.key` to decrypt corresponding secret files. This is an `AGE <https://github.com/FiloSottile/age>`_ key file. **Do NOT commit this to git!**. JayporeCI automatically adds a gitignore to ignore key files.
- - We also add `secrets/bin/edit_env.sh` and `secrets/bin/set_env.sh` to help you manage your secrets easily.
-2. It is a good idea to have separate secret files for each developer, each environment respectively.
- - For example, JayporeCI itself only has a single secret file called `ci`.
-
-
-How to
-======
-
-See job logs
-------------
-
-- The recommended way is to have a `Dozzle <https://dozzle.dev/>`_ container on your localhost to explore CI jobs.
-- You can also run `docker logs <container ID>` locally.
-- To debug running containers you can `docker exec <container ID>` while the job is running.
-
-Build and publish docker images
--------------------------------
-
-Environment / package dependencies can be cached in docker easily. Simply build
-your docker image and then run the job with that built image.
-
-.. literalinclude:: examples/build_and_publish_docker_images.py
- :language: python
- :linenos:
-
-Define complex job relations
-----------------------------
-
-This config builds docker images, runs linting, testing on the
-codebase, then builds and publishes documentation.
-
-.. literalinclude:: examples/complex_dependencies.py
- :language: python
- :linenos:
-
-
-Run a job matrix
-----------------
-
-There is no special concept for matrix jobs. Just declare as many jobs as you
-want in a while loop. There is a function to make this easier when you want to
-run combinations of variables.
-
-.. literalinclude:: examples/job_matrix.py
- :language: python
- :linenos:
-
-
-The above config generates 3 x 3 x 2 = 18 jobs and sets the environment for each to a unique combination of `BROWSER` , `SCREENSIZE`, and `ONLINE`.
-
-Run on cloud/remote runners
----------------------------
-
-- Make sure docker is installed on the remote machine.
-- Make sure you have ssh access to remote machine and the user you are logging in as can run docker commands.
-- Add to your local `~.ssh/config` an entry for your remote machine. Something like:
-
- .. code-block:: text
-
- Host my.aws.machine
- HostName some.aws.machine
- IdentityFile ~/.ssh/id_rsa
-
-- Now in your `cicd/pre-push.sh` file, where the `docker run` command is mentioned, simply add `DOCKER_HOST=ssh://my.aws.machine`
-- JayporeCi will then run on the remote machine.
-
-Use custom services for testing
--------------------------------
-
-Some jobs don't affect the status of the pipeline. They just need to be there
-while you are running your tests. For example, you might need a DB to run API
-testing, or you might need both the DB and API as a service to run integration
-testing.
-
-To do this you can add `is_service=True` to the job / stage / pipeline arguments.
-
-Services are only shut down when the pipeline is finished.
-
-
-
-.. literalinclude:: examples/custom_services.py
- :language: python
- :linenos:
-
-
-Import jobs with pip install
-----------------------------
-
-You can also import jobs defined by other people. Some examples of why you might want to do this:
-
-- A common lint policy for company / clients.
-- Common deploy targets and processes for things like docs / release notes.
-- Common notification targets like slack / telegram / email.
-- Common PR description checklist for company / clients.
-- Common PR merge policies / review policies etc.
-
-Since `JayporeCI` has a normal programming language as it's config language, most things can be solved without too much effort.
-
-
-Publish Artifacts / Cache
--------------------------
-
-- All jobs run in a shared directory **/jaypore_ci/run**.
-- Anything you write to this directory is available to all jobs so you can use this to pass artifacts / cache between jobs.
-- You can have a separate job to POST your artifacts to some remote location / git notes / S3 / gitea
-
-
-Jobs based on files change / branch name
-----------------------------------------
-
-Some jobs only need to run when your branch is **main** or in release branches.
-At other times we want to check commit messages and based on the message run
-different jobs.
-
-.. literalinclude:: examples/optional_jobs.py
- :language: python
- :linenos:
-
-
-Test your pipeline config
--------------------------
-
-Mistakes in the pipeline config can take a long time to catch if you are running a large test harness.
-
-With Jaypore CI it's fairly simple. Just write tests for your pipeline since it's normal Python code!
-
-To help you do this there are mock executors/remotes that you can use instead
-of Docker/Gitea. This example taken from Jaypore CI's own tests shows how you
-would test and make sure that jobs are running in order.
-
-.. literalinclude:: examples/optional_jobs.py
- :language: python
- :linenos:
-
-Status report via email
------------------------
-
-You can send pipeline status reports via email if you don't want to use the PR system for gitea/github etc.
-
-See the :class:`~jaypore_ci.remotes.email.Email` docs for the environment
-variables you will have to supply to make this work.
-
-
-.. literalinclude:: examples/report_via_email.py
- :language: python
- :linenos:
-
-Run selected jobs based on commit message
------------------------------------------
-
-Sometimes we want to control when some jobs run. For example, build/release jobs, or intensive testing jobs.
-A simple way to do this is to read the commit messsage and see if the author
-asked us to run these jobs. JayporeCI itself only runs release jobs when the
-commit message contains **jci:release** as one of it's lines.
-
-
-.. literalinclude:: examples/jobs_based_on_commit_messages.py
- :language: python
- :linenos:
-
-`💬 <https://github.com/theSage21/jaypore_ci/discussions/20>`_ :Select remote based on job status / branch / authors
---------------------------------------------------------------------------------------------------------------------
-
-.. note::
- If you want this feature please go and vote for it on the `github discussion
- <https://github.com/theSage21/jaypore_ci/discussions>`_.
-
-At times it's necessary to inform multiple people about CI failues / passing.
-
-For example
-
-- Stakeholders might need notifications when releases happen.
-- People who wrote code might need notifications when their code breaks on a more intensite test suite / fuzzying run.
-- Perhaps you have downstream codebases that need to get patched when you do bugfixes.
-- Or perhaps a failure in the build section of the pipeline needs one set of
- people to be informed and a failure in the user documentation building needs
- another set of people.
-
-
-While all of this is already possible with JayporeCI, if this is a common
-workflow you can vote on it and we can implement an easier way to declare this
-configuration.
-
-Run multiple pipelines on every commit
---------------------------------------
-
-You can modify `cicd/pre-push.sh` so that instead of creating a single pipeline
-it creates multiple pipelines. This can be useful when you have a personal CI
-config that you want to run and a separate team / organization pipeline that
-needs to be run as well.
-
-This is not the recommended way however since it would be a lot easier to make
-`cicd/cicd.py` a proper python package instead and put the two configs there
-itself.
-
-Passing extra_hosts and other arguments to docker
--------------------------------------------------
-
-Often times you want to configure some extra stuff for the docker run command
-that will be used to run your job, like when you want to pass `extra_hosts` or
-`device_requests` to the container.
-
-To do such things you can use the `executor_kwargs` argument while defining the
-job using :meth:`~jaypore_ci.jci.Pipeline.job`. Anything that you pass to
-this dictionary will be handed off to `Docker-py
-<https://docker-py.readthedocs.io/en/stable/containers.html#docker.models.containers.ContainerCollection.run>`_
-and so you can use anything that is mentioned in that documentation.
-
-.. literalinclude:: examples/extra_hosts.py
- :language: python
- :linenos:
-
-Using a github remote
----------------------
-
-If you want to use github instead of gitea, it's very simple to use.
-
-.. literalinclude:: examples/github_remote.py
- :language: python
- :linenos:
-
-
-Contributing
-============
-
-- Development happens on a self hosted gitea instance and the source code is mirrored at `Github <https://github.com/theSage21/jaypore_ci>`_.
-- If you are facing issues please file them on github.
-- Please use `Github discussions <https://github.com/theSage21/jaypore_ci/discussions>`_ for describing problems / asking for help / adding ideas.
-- Jaypore CI is open source, but not openly developed yet so instead of submitting PRs, please fork the project and start a discussion.
-
-Reference
-=========
-
-.. toctree::
- :glob:
-
- reference/modules.rst
-
-.. |logo| image:: _static/logo80.png
- :width: 80
- :alt: Jaypore CI
- :align: middle
-
-Changelog
-=========
-
diff --git a/git-jci b/git-jci
Binary files differ.
diff --git a/go.mod b/go.mod
@@ -0,0 +1,3 @@
+module github.com/exedev/git-jci
+
+go 1.22.2
diff --git a/internal/jci/git.go b/internal/jci/git.go
@@ -0,0 +1,144 @@
+package jci
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+)
+
// git invokes the git binary with the given arguments and returns the
// trimmed stdout. On failure the returned error carries the full command
// line plus anything git wrote to stderr, for easier debugging.
func git(args ...string) (string, error) {
	var outBuf, errBuf bytes.Buffer
	cmd := exec.Command("git", args...)
	cmd.Stdout = &outBuf
	cmd.Stderr = &errBuf
	err := cmd.Run()
	if err != nil {
		return "", fmt.Errorf("git %s: %v\n%s", strings.Join(args, " "), err, errBuf.String())
	}
	return strings.TrimSpace(outBuf.String()), nil
}
+
+// GetCurrentCommit returns the current HEAD commit hash
+func GetCurrentCommit() (string, error) {
+ return git("rev-parse", "HEAD")
+}
+
+// GetRepoRoot returns the root directory of the git repository
+func GetRepoRoot() (string, error) {
+ return git("rev-parse", "--show-toplevel")
+}
+
+// RefExists checks if a ref exists
+func RefExists(ref string) bool {
+ _, err := git("rev-parse", "--verify", ref)
+ return err == nil
+}
+
+// StoreTree stores a directory as a tree object and creates a commit under refs/jci/<commit>
+func StoreTree(dir string, commit string, message string) error {
+ repoRoot, err := GetRepoRoot()
+ if err != nil {
+ return err
+ }
+
+ tmpIndex := repoRoot + "/.git/jci-index"
+ defer exec.Command("rm", "-f", tmpIndex).Run()
+
+ // We need to use git hash-object and mktree to build a tree
+ // from files outside the repo
+ treeID, err := hashDir(dir, repoRoot, tmpIndex)
+ if err != nil {
+ return fmt.Errorf("failed to hash directory: %w", err)
+ }
+
+ // Create commit from tree
+ commitTreeCmd := exec.Command("git", "commit-tree", treeID, "-m", message)
+ commitTreeCmd.Dir = repoRoot
+ commitOut, err := commitTreeCmd.Output()
+ if err != nil {
+ return fmt.Errorf("git commit-tree: %v", err)
+ }
+ commitID := strings.TrimSpace(string(commitOut))
+
+ // Update ref
+ ref := "refs/jci/" + commit
+ if _, err := git("update-ref", ref, commitID); err != nil {
+ return fmt.Errorf("git update-ref: %v", err)
+ }
+
+ return nil
+}
+
// hashDir recursively hashes a directory and returns its tree ID.
//
// Each regular file is written into the object database with
// `git hash-object -w`; each subdirectory is hashed recursively and the
// resulting entries are assembled into a tree via `git mktree`.
//
// NOTE(review): tmpIndex is accepted but never used in this function —
// confirm whether the parameter can be dropped (StoreTree still creates
// and cleans up the file).
func hashDir(dir string, repoRoot string, tmpIndex string) (string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return "", err
	}

	// Lines in the `git mktree` input format: "<mode> <type> <id>\t<name>".
	var treeEntries []string

	for _, entry := range entries {
		path := filepath.Join(dir, entry.Name())

		if entry.IsDir() {
			// Recursively hash subdirectory
			subTreeID, err := hashDir(path, repoRoot, tmpIndex)
			if err != nil {
				return "", err
			}
			treeEntries = append(treeEntries, fmt.Sprintf("040000 tree %s\t%s", subTreeID, entry.Name()))
		} else {
			// Hash file (-w writes the blob into the object database).
			cmd := exec.Command("git", "hash-object", "-w", path)
			cmd.Dir = repoRoot
			out, err := cmd.Output()
			if err != nil {
				return "", fmt.Errorf("hash-object %s: %v", path, err)
			}
			blobID := strings.TrimSpace(string(out))

			// Get file mode: git trees only distinguish regular (100644)
			// from executable (100755) blobs.
			info, err := entry.Info()
			if err != nil {
				return "", err
			}
			mode := "100644"
			if info.Mode()&0111 != 0 {
				mode = "100755"
			}
			treeEntries = append(treeEntries, fmt.Sprintf("%s blob %s\t%s", mode, blobID, entry.Name()))
		}
	}

	// Create tree from entries; mktree expects newline-terminated input,
	// and an empty input produces the (valid) empty tree.
	treeInput := strings.Join(treeEntries, "\n")
	if treeInput != "" {
		treeInput += "\n"
	}

	cmd := exec.Command("git", "mktree")
	cmd.Dir = repoRoot
	cmd.Stdin = strings.NewReader(treeInput)
	out, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("mktree: %v (input: %q)", err, treeInput)
	}

	return strings.TrimSpace(string(out)), nil
}
+
+// ListJCIRefs returns all refs under refs/jci/
+func ListJCIRefs() ([]string, error) {
+ out, err := git("for-each-ref", "--format=%(refname:short)", "refs/jci/")
+ if err != nil {
+ return nil, err
+ }
+ if out == "" {
+ return nil, nil
+ }
+ return strings.Split(out, "\n"), nil
+}
diff --git a/internal/jci/prune.go b/internal/jci/prune.go
@@ -0,0 +1,403 @@
+package jci
+
+import (
+ "fmt"
+ "os/exec"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+)
+
// PruneOptions holds the options for the prune command
type PruneOptions struct {
	// Commit actually performs the deletion; without it prune is a dry run.
	Commit bool
	// OnRemote, when non-empty, prunes refs on the named remote instead of locally.
	OnRemote string
	// OlderThan, when non-zero, restricts pruning to results older than this age.
	OlderThan time.Duration
}
+
+// ParsePruneArgs parses command line arguments for prune
+func ParsePruneArgs(args []string) (*PruneOptions, error) {
+ opts := &PruneOptions{}
+
+ for _, arg := range args {
+ if arg == "--commit" {
+ opts.Commit = true
+ } else if strings.HasPrefix(arg, "--on-remote=") {
+ opts.OnRemote = strings.TrimPrefix(arg, "--on-remote=")
+ } else if strings.HasPrefix(arg, "--on-remote") {
+ // Handle --on-remote origin (space separated)
+ continue
+ } else if strings.HasPrefix(arg, "--older-than=") {
+ durStr := strings.TrimPrefix(arg, "--older-than=")
+ dur, err := parseDuration(durStr)
+ if err != nil {
+ return nil, fmt.Errorf("invalid duration %q: %v", durStr, err)
+ }
+ opts.OlderThan = dur
+ } else if !strings.HasPrefix(arg, "-") && opts.OnRemote == "" {
+ // Check if previous arg was --on-remote
+ for i, a := range args {
+ if a == "--on-remote" && i+1 < len(args) && args[i+1] == arg {
+ opts.OnRemote = arg
+ break
+ }
+ }
+ }
+ }
+
+ return opts, nil
+}
+
+// parseDuration parses duration strings like "30d", "2w", "1h"
+func parseDuration(s string) (time.Duration, error) {
+ re := regexp.MustCompile(`^(\d+)([dhwm])$`)
+ matches := re.FindStringSubmatch(s)
+ if matches == nil {
+ // Try standard Go duration
+ return time.ParseDuration(s)
+ }
+
+ num, _ := strconv.Atoi(matches[1])
+ unit := matches[2]
+
+ switch unit {
+ case "d":
+ return time.Duration(num) * 24 * time.Hour, nil
+ case "w":
+ return time.Duration(num) * 7 * 24 * time.Hour, nil
+ case "m":
+ return time.Duration(num) * 30 * 24 * time.Hour, nil
+ case "h":
+ return time.Duration(num) * time.Hour, nil
+ }
+
+ return 0, fmt.Errorf("unknown unit: %s", unit)
+}
+
// RefInfo holds information about a JCI ref
type RefInfo struct {
	// Ref is the ref name (short form locally, full refs/jci/... for remotes).
	Ref string
	// Commit is the source commit hash the CI results belong to.
	Commit string
	// Timestamp is the committer date of the results commit; zero if unknown.
	Timestamp time.Time
	// Size is the estimated total object size of the ref, in bytes.
	Size int64
}
+
+// Prune removes CI results based on options
+func Prune(args []string) error {
+ opts, err := ParsePruneArgs(args)
+ if err != nil {
+ return err
+ }
+
+ if opts.OnRemote != "" {
+ return pruneRemote(opts)
+ }
+ return pruneLocal(opts)
+}
+
// pruneLocal deletes local refs/jci/* entries whose source commit no longer
// exists, or (with --older-than) whose CI results are older than the cutoff.
// Without --commit this is a dry run that only reports what would be freed.
func pruneLocal(opts *PruneOptions) error {
	refs, err := ListJCIRefs()
	if err != nil {
		return err
	}

	if len(refs) == 0 {
		fmt.Println("No CI results to prune")
		return nil
	}

	// Get info (timestamp, size) for all refs before deciding what to prune.
	var refInfos []RefInfo
	var totalSize int64

	fmt.Println("Scanning CI results...")
	for i, ref := range refs {
		// ListJCIRefs returns short names ("jci/<sha>"); strip the prefix
		// to recover the source commit hash.
		commit := strings.TrimPrefix(ref, "jci/")
		printProgress(i+1, len(refs), "Scanning")

		info := RefInfo{
			Ref:    ref,
			Commit: commit,
		}

		// Get timestamp from the JCI commit. %ci is git's committer date;
		// a parse failure leaves the zero time, which disables the age
		// check for this ref below.
		timeStr, err := git("log", "-1", "--format=%ci", "refs/jci/"+commit)
		if err == nil {
			info.Timestamp, _ = time.Parse("2006-01-02 15:04:05 -0700", timeStr)
		}

		// Get size of the tree (estimate — see getRefSize).
		info.Size = getRefSize("refs/jci/" + commit)
		totalSize += info.Size

		refInfos = append(refInfos, info)
	}
	fmt.Println() // newline after progress

	// Filter refs to prune.
	var toPrune []RefInfo
	var prunedSize int64
	now := time.Now()

	for _, info := range refInfos {
		shouldPrune := false

		// Check if the source commit still exists; dangling results are
		// always prunable (original behavior).
		_, err := git("cat-file", "-t", info.Commit)
		if err != nil {
			shouldPrune = true
		}

		// Check age if --older-than specified; refs with an unknown
		// timestamp are never pruned by age.
		if opts.OlderThan > 0 && !info.Timestamp.IsZero() {
			age := now.Sub(info.Timestamp)
			if age > opts.OlderThan {
				shouldPrune = true
			}
		}

		if shouldPrune {
			toPrune = append(toPrune, info)
			prunedSize += info.Size
		}
	}

	if len(toPrune) == 0 {
		fmt.Println("Nothing to prune")
		fmt.Printf("Total CI data: %s\n", formatSize(totalSize))
		return nil
	}

	// Show what will be pruned.
	fmt.Printf("\nFound %d ref(s) to prune:\n", len(toPrune))
	for _, info := range toPrune {
		age := ""
		if !info.Timestamp.IsZero() {
			age = fmt.Sprintf(" (age: %s)", formatAge(now.Sub(info.Timestamp)))
		}
		fmt.Printf("  %s  %s%s\n", info.Commit[:12], formatSize(info.Size), age)
	}

	fmt.Printf("\nTotal to free: %s (of %s total)\n", formatSize(prunedSize), formatSize(totalSize))

	if !opts.Commit {
		fmt.Println("\n[DRY RUN] Use --commit to actually delete")
		return nil
	}

	// Actually delete. Failures are reported per-ref and skipped so one
	// bad ref does not abort the whole prune.
	fmt.Println("\nDeleting...")
	deleted := 0
	for i, info := range toPrune {
		printProgress(i+1, len(toPrune), "Deleting")
		if _, err := git("update-ref", "-d", "refs/jci/"+info.Commit); err != nil {
			fmt.Printf("\n  Warning: failed to delete %s: %v\n", info.Commit[:12], err)
			continue
		}
		deleted++
	}
	fmt.Println() // newline after progress

	// Run gc to actually free space; best-effort, errors are ignored.
	fmt.Println("Running git gc...")
	exec.Command("git", "gc", "--prune=now", "--quiet").Run()

	fmt.Printf("\nDeleted %d CI result(s), freed approximately %s\n", deleted, formatSize(prunedSize))
	return nil
}
+
// pruneRemote deletes refs/jci/* entries on the given remote whose CI
// results are older than the --older-than cutoff. Without --commit this is
// a dry run. Unlike pruneLocal, there is no dangling-commit check here —
// only the age filter applies.
//
// NOTE(review): scanning fetches each remote ref into the local repository
// (to read its timestamp/size) and those local copies are not removed
// afterwards — confirm whether that side effect is intended.
func pruneRemote(opts *PruneOptions) error {
	remote := opts.OnRemote

	fmt.Printf("Fetching CI refs from %s...\n", remote)

	// List refs/jci/* on the remote without fetching objects yet.
	out, err := git("ls-remote", remote, "refs/jci/*")
	if err != nil {
		return fmt.Errorf("failed to list remote refs: %v", err)
	}

	if out == "" {
		fmt.Println("No CI results on remote")
		return nil
	}

	lines := strings.Split(out, "\n")
	var refInfos []RefInfo

	fmt.Println("Scanning remote CI results...")
	for i, line := range lines {
		if line == "" {
			continue
		}
		printProgress(i+1, len(lines), "Scanning")

		// ls-remote output is "<sha>\t<refname>" per line.
		parts := strings.Fields(line)
		if len(parts) != 2 {
			continue
		}

		refName := parts[1]
		commit := strings.TrimPrefix(refName, "refs/jci/")

		info := RefInfo{
			Ref:    refName,
			Commit: commit,
		}

		// Fetch this specific ref to get its timestamp.
		// We need to fetch it temporarily to inspect it (best-effort;
		// errors are ignored and simply leave the timestamp zero).
		exec.Command("git", "fetch", remote, refName+":"+refName, "--quiet").Run()

		timeStr, err := git("log", "-1", "--format=%ci", refName)
		if err == nil {
			info.Timestamp, _ = time.Parse("2006-01-02 15:04:05 -0700", timeStr)
		}

		info.Size = getRefSize(refName)
		refInfos = append(refInfos, info)
	}
	fmt.Println() // newline after progress

	// Filter refs to prune: only the age criterion applies on remotes.
	var toPrune []RefInfo
	var prunedSize int64
	var totalSize int64
	now := time.Now()

	for _, info := range refInfos {
		totalSize += info.Size
		shouldPrune := false

		// Check age if --older-than specified; unknown timestamps are kept.
		if opts.OlderThan > 0 && !info.Timestamp.IsZero() {
			age := now.Sub(info.Timestamp)
			if age > opts.OlderThan {
				shouldPrune = true
			}
		}

		if shouldPrune {
			toPrune = append(toPrune, info)
			prunedSize += info.Size
		}
	}

	if len(toPrune) == 0 {
		fmt.Println("Nothing to prune on remote")
		fmt.Printf("Total remote CI data: %s\n", formatSize(totalSize))
		return nil
	}

	// Show what will be pruned.
	fmt.Printf("\nFound %d ref(s) to prune on %s:\n", len(toPrune), remote)
	for _, info := range toPrune {
		age := ""
		if !info.Timestamp.IsZero() {
			age = fmt.Sprintf(" (age: %s)", formatAge(now.Sub(info.Timestamp)))
		}
		fmt.Printf("  %s  %s%s\n", info.Commit[:12], formatSize(info.Size), age)
	}

	fmt.Printf("\nTotal to free on remote: %s (of %s total)\n", formatSize(prunedSize), formatSize(totalSize))

	if !opts.Commit {
		fmt.Println("\n[DRY RUN] Use --commit to actually delete from remote")
		return nil
	}

	// Delete from remote using git push with a delete refspec
	// (":<ref>" pushes "nothing" to the ref, removing it).
	fmt.Println("\nDeleting from remote...")
	deleted := 0
	for i, info := range toPrune {
		printProgress(i+1, len(toPrune), "Deleting")
		// Push empty ref to delete; failures are reported and skipped.
		_, err := git("push", remote, ":refs/jci/"+info.Commit)
		if err != nil {
			fmt.Printf("\n  Warning: failed to delete %s: %v\n", info.Commit[:12], err)
			continue
		}
		deleted++
	}
	fmt.Println() // newline after progress

	fmt.Printf("\nDeleted %d CI result(s) from %s, freed approximately %s\n", deleted, remote, formatSize(prunedSize))
	return nil
}
+
// getRefSize estimates the total size in bytes of all objects reachable
// from ref. Returns 0 on any error (size is advisory, used for reporting).
//
// Fix: the previous version spawned one `git cat-file -s` process per
// object, which is very slow for large trees. All object IDs are now piped
// through a single `git cat-file --batch-check` invocation.
func getRefSize(ref string) int64 {
	// Enumerate every object (commits, trees, blobs) reachable from the ref.
	out, err := exec.Command("git", "rev-list", "--objects", ref).Output()
	if err != nil {
		return 0
	}

	// rev-list --objects prints "<id> [<path>]"; keep only the IDs.
	var ids []string
	for _, line := range strings.Split(string(out), "\n") {
		fields := strings.Fields(line)
		if len(fields) == 0 {
			continue
		}
		ids = append(ids, fields[0])
	}
	if len(ids) == 0 {
		return 0
	}

	// One batched query for all sizes: one line of output per input ID.
	cmd := exec.Command("git", "cat-file", "--batch-check=%(objectsize)")
	cmd.Stdin = strings.NewReader(strings.Join(ids, "\n") + "\n")
	sizesOut, err := cmd.Output()
	if err != nil {
		return 0
	}

	var totalSize int64
	for _, line := range strings.Split(string(sizesOut), "\n") {
		// Lines for missing objects read "<id> missing" and fail to parse;
		// they are simply skipped, matching the old per-object behavior.
		size, err := strconv.ParseInt(strings.TrimSpace(line), 10, 64)
		if err == nil {
			totalSize += size
		}
	}
	return totalSize
}
+
// printProgress redraws a single-line progress bar in place (via \r),
// showing label, a filled/empty bar, the current/total count, and percent.
func printProgress(current, total int, label string) {
	const width = 30
	frac := float64(current) / float64(total)
	done := int(frac * float64(width))

	bar := strings.Repeat("█", done) + strings.Repeat("░", width-done)
	fmt.Printf("\r%s [%s] %d/%d (%.0f%%)", label, bar, current, total, frac*100)
}
+
+// formatSize formats bytes as human-readable
+func formatSize(bytes int64) string {
+ const unit = 1024
+ if bytes < unit {
+ return fmt.Sprintf("%d B", bytes)
+ }
+ div, exp := int64(unit), 0
+ for n := bytes / unit; n >= unit; n /= unit {
+ div *= unit
+ exp++
+ }
+ return fmt.Sprintf("%.1f %cB", float64(bytes)/float64(div), "KMGTPE"[exp])
+}
+
+// formatAge formats a duration as human-readable age
+func formatAge(d time.Duration) string {
+ days := int(d.Hours() / 24)
+ if days >= 365 {
+ years := days / 365
+ return fmt.Sprintf("%dy", years)
+ }
+ if days >= 30 {
+ months := days / 30
+ return fmt.Sprintf("%dmo", months)
+ }
+ if days >= 7 {
+ weeks := days / 7
+ return fmt.Sprintf("%dw", weeks)
+ }
+ if days > 0 {
+ return fmt.Sprintf("%dd", days)
+ }
+ hours := int(d.Hours())
+ if hours > 0 {
+ return fmt.Sprintf("%dh", hours)
+ }
+ return "<1h"
+}
diff --git a/internal/jci/pull.go b/internal/jci/pull.go
@@ -0,0 +1,24 @@
+package jci
+
+import (
+ "fmt"
+)
+
+// Pull fetches CI results from remote
+func Pull(args []string) error {
+ remote := "origin"
+ if len(args) > 0 {
+ remote = args[0]
+ }
+
+ fmt.Printf("Fetching CI results from %s...\n", remote)
+
+ // Fetch all refs/jci/* from remote
+ _, err := git("fetch", remote, "refs/jci/*:refs/jci/*")
+ if err != nil {
+ return err
+ }
+
+ fmt.Println("Done")
+ return nil
+}
diff --git a/internal/jci/push.go b/internal/jci/push.go
@@ -0,0 +1,34 @@
+package jci
+
+import (
+ "fmt"
+)
+
+// Push pushes CI results to remote
+func Push(args []string) error {
+ remote := "origin"
+ if len(args) > 0 {
+ remote = args[0]
+ }
+
+ refs, err := ListJCIRefs()
+ if err != nil {
+ return err
+ }
+
+ if len(refs) == 0 {
+ fmt.Println("No CI results to push")
+ return nil
+ }
+
+ fmt.Printf("Pushing %d CI result(s) to %s...\n", len(refs), remote)
+
+ // Push all refs/jci/* to remote
+ _, err = git("push", remote, "refs/jci/*:refs/jci/*")
+ if err != nil {
+ return err
+ }
+
+ fmt.Println("Done")
+ return nil
+}
diff --git a/internal/jci/run.go b/internal/jci/run.go
@@ -0,0 +1,183 @@
+package jci
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "time"
+)
+
+// Run executes CI for the current commit
+func Run(args []string) error {
+ // Get current commit
+ commit, err := GetCurrentCommit()
+ if err != nil {
+ return fmt.Errorf("failed to get current commit: %w", err)
+ }
+
+ fmt.Printf("Running CI for commit %s\n", commit[:12])
+
+ // Check if CI already ran for this commit
+ ref := "refs/jci/" + commit
+ if RefExists(ref) {
+ fmt.Printf("CI results already exist for %s\n", commit[:12])
+ return nil
+ }
+
+ repoRoot, err := GetRepoRoot()
+ if err != nil {
+ return fmt.Errorf("failed to get repo root: %w", err)
+ }
+
+ // Check if .jci/run.sh exists
+ runScript := filepath.Join(repoRoot, ".jci", "run.sh")
+ if _, err := os.Stat(runScript); os.IsNotExist(err) {
+ return fmt.Errorf(".jci/run.sh not found - create it to define your CI pipeline")
+ }
+
+ // Create output directory .jci/<commit>
+ outputDir := filepath.Join(repoRoot, ".jci", commit)
+ if err := os.MkdirAll(outputDir, 0755); err != nil {
+ return fmt.Errorf("failed to create output dir: %w", err)
+ }
+
+ // Run CI
+ err = runCI(repoRoot, outputDir, commit)
+ // Continue even if CI fails - we still want to store the results
+
+ // Generate index.html with results
+ if err := generateIndexHTML(outputDir, commit, err); err != nil {
+ fmt.Printf("Warning: failed to generate index.html: %v\n", err)
+ }
+
+ // Store results in git
+ msg := fmt.Sprintf("CI results for %s", commit[:12])
+ if storeErr := StoreTree(outputDir, commit, msg); storeErr != nil {
+ return fmt.Errorf("failed to store CI results: %w", storeErr)
+ }
+
+ // Clean up the output directory after storing in git
+ os.RemoveAll(outputDir)
+
+ fmt.Printf("CI results stored at %s\n", ref)
+ if err != nil {
+ return fmt.Errorf("CI failed (results stored): %w", err)
+ }
+ return nil
+}
+
+// runCI executes .jci/run.sh and captures output
+func runCI(repoRoot string, outputDir string, commit string) error {
+ runScript := filepath.Join(repoRoot, ".jci", "run.sh")
+ outputFile := filepath.Join(outputDir, "run.output.txt")
+
+ // Create output file
+ f, err := os.Create(outputFile)
+ if err != nil {
+ return fmt.Errorf("failed to create output file: %w", err)
+ }
+ defer f.Close()
+
+ // Write header
+ fmt.Fprintf(f, "=== JCI Run Output ===\n")
+ fmt.Fprintf(f, "Commit: %s\n", commit)
+ fmt.Fprintf(f, "Started: %s\n", time.Now().Format(time.RFC3339))
+ fmt.Fprintf(f, "======================\n\n")
+
+ // Run the script
+ cmd := exec.Command("bash", runScript)
+ cmd.Dir = outputDir
+ cmd.Env = append(os.Environ(),
+ "JCI_COMMIT="+commit,
+ "JCI_REPO_ROOT="+repoRoot,
+ "JCI_OUTPUT_DIR="+outputDir,
+ )
+
+ // Capture both stdout and stderr to the same file
+ cmd.Stdout = f
+ cmd.Stderr = f
+
+ fmt.Printf("Executing .jci/run.sh...\n")
+ startTime := time.Now()
+ runErr := cmd.Run()
+ duration := time.Since(startTime)
+
+ // Write footer
+ fmt.Fprintf(f, "\n======================\n")
+ fmt.Fprintf(f, "Finished: %s\n", time.Now().Format(time.RFC3339))
+ fmt.Fprintf(f, "Duration: %s\n", duration.Round(time.Millisecond))
+ if runErr != nil {
+ fmt.Fprintf(f, "Exit: FAILED - %v\n", runErr)
+ } else {
+ fmt.Fprintf(f, "Exit: SUCCESS\n")
+ }
+
+ return runErr
+}
+
+// generateIndexHTML creates a minimal index.html for standalone viewing
+// The main UI is served by the web server; this is for direct file access
+func generateIndexHTML(outputDir string, commit string, ciErr error) error {
+ commitMsg, _ := git("log", "-1", "--format=%s", commit)
+
+ status := "success"
+ statusIcon := "✓ PASSED"
+ if ciErr != nil {
+ status = "failed"
+ statusIcon = "✗ FAILED"
+ }
+
+ // Read output for standalone view
+ outputContent := ""
+ outputFile := filepath.Join(outputDir, "run.output.txt")
+ if data, err := os.ReadFile(outputFile); err == nil {
+ outputContent = string(data)
+ }
+
+ html := fmt.Sprintf(`<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>%s %s</title>
+ <style>
+ body { font-family: monospace; font-size: 12px; background: #1a1a1a; color: #e0e0e0; padding: 8px; }
+ .header { margin-bottom: 8px; }
+ .%s { color: %s; font-weight: bold; }
+ pre { white-space: pre-wrap; }
+ </style>
+</head>
+<body>
+ <div class="header">
+ <span class="%s">%s</span> %s %s
+ </div>
+ <pre>%s</pre>
+</body>
+</html>
+`, commit[:7], escapeHTML(commitMsg),
+ status, map[string]string{"success": "#3fb950", "failed": "#f85149"}[status],
+ status, statusIcon, commit[:7], escapeHTML(commitMsg),
+ escapeHTML(outputContent))
+
+ indexPath := filepath.Join(outputDir, "index.html")
+ return os.WriteFile(indexPath, []byte(html), 0644)
+}
+
+func escapeHTML(s string) string {
+ replacer := map[rune]string{
+		'<':  "&lt;",
+		'>':  "&gt;",
+		'&':  "&amp;",
+		'"':  "&quot;",
+		'\'': "&#39;",
+ }
+ result := ""
+ for _, r := range s {
+ if rep, ok := replacer[r]; ok {
+ result += rep
+ } else {
+ result += string(r)
+ }
+ }
+ return result
+}
diff --git a/internal/jci/web.go b/internal/jci/web.go
@@ -0,0 +1,640 @@
+package jci
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+)
+
+// BranchInfo holds branch data for the UI
+type BranchInfo struct {
+ Name string `json:"name"`
+ IsRemote bool `json:"isRemote"`
+ Commits []CommitInfo `json:"commits"`
+}
+
+// CommitInfo holds commit data for the UI
+type CommitInfo struct {
+ Hash string `json:"hash"`
+ ShortHash string `json:"shortHash"`
+ Message string `json:"message"`
+ HasCI bool `json:"hasCI"`
+ CIStatus string `json:"ciStatus"` // "success", "failed", or ""
+ CIPushed bool `json:"ciPushed"` // whether CI ref is pushed to remote
+}
+
+// Web starts a web server to view CI results
+func Web(args []string) error {
+ port := "8000"
+ if len(args) > 0 {
+ port = args[0]
+ }
+
+ repoRoot, err := GetRepoRoot()
+ if err != nil {
+ return err
+ }
+
+ fmt.Printf("Starting JCI web server on http://localhost:%s\n", port)
+ fmt.Println("Press Ctrl+C to stop")
+
+ http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
+ handleRequest(w, r, repoRoot)
+ })
+
+ return http.ListenAndServe(":"+port, nil)
+}
+
+func handleRequest(w http.ResponseWriter, r *http.Request, repoRoot string) {
+ path := r.URL.Path
+
+ // Root or /jci/... without file: show main SPA
+ if path == "/" || (strings.HasPrefix(path, "/jci/") && !strings.Contains(strings.TrimPrefix(path, "/jci/"), ".")) {
+ showMainPage(w, r)
+ return
+ }
+
+ // API endpoint for branch data
+ if path == "/api/branches" {
+ serveBranchesAPI(w)
+ return
+ }
+
+ // API endpoint for commit info
+ if strings.HasPrefix(path, "/api/commit/") {
+ commit := strings.TrimPrefix(path, "/api/commit/")
+ serveCommitAPI(w, commit)
+ return
+ }
+
+ // /jci/<commit>/<file> - serve files from that commit's CI results
+ if strings.HasPrefix(path, "/jci/") {
+ parts := strings.SplitN(strings.TrimPrefix(path, "/jci/"), "/", 2)
+ commit := parts[0]
+ filePath := ""
+ if len(parts) > 1 {
+ filePath = parts[1]
+ }
+ if filePath == "" {
+ showMainPage(w, r)
+ return
+ }
+ serveFromRef(w, commit, filePath)
+ return
+ }
+
+ http.NotFound(w, r)
+}
+
+// getLocalBranches returns local branch names
+func getLocalBranches() ([]string, error) {
+ out, err := git("branch", "--format=%(refname:short)")
+ if err != nil {
+ return nil, err
+ }
+ if out == "" {
+ return nil, nil
+ }
+ return strings.Split(out, "\n"), nil
+}
+
+// getRemoteJCIRefs returns a set of commits that have CI refs pushed to remote
+func getRemoteJCIRefs(remote string) map[string]bool {
+ remoteCI := make(map[string]bool)
+
+ // Get remote JCI refs
+ out, err := git("ls-remote", "--refs", remote, "refs/jci/*")
+ if err != nil {
+ return remoteCI
+ }
+ if out == "" {
+ return remoteCI
+ }
+
+ for _, line := range strings.Split(out, "\n") {
+ parts := strings.Fields(line)
+ if len(parts) >= 2 {
+ // refs/jci/<commit> -> <commit>
+ ref := parts[1]
+ commit := strings.TrimPrefix(ref, "refs/jci/")
+ remoteCI[commit] = true
+ }
+ }
+ return remoteCI
+}
+
+// getBranchCommits returns recent commits for a branch
+func getBranchCommits(branch string, limit int) ([]CommitInfo, error) {
+ // Get commit hash and message
+ out, err := git("log", branch, fmt.Sprintf("--max-count=%d", limit), "--format=%H|%s")
+ if err != nil {
+ return nil, err
+ }
+ if out == "" {
+ return nil, nil
+ }
+
+ // Get local JCI refs
+ jciRefs, _ := ListJCIRefs()
+ jciSet := make(map[string]bool)
+ for _, ref := range jciRefs {
+ commit := strings.TrimPrefix(ref, "jci/")
+ jciSet[commit] = true
+ }
+
+ // Get remote JCI refs for CI push status
+ remoteCI := getRemoteJCIRefs("origin")
+
+ var commits []CommitInfo
+ for _, line := range strings.Split(out, "\n") {
+ parts := strings.SplitN(line, "|", 2)
+ if len(parts) != 2 {
+ continue
+ }
+ hash := parts[0]
+ msg := parts[1]
+
+ commit := CommitInfo{
+ Hash: hash,
+ ShortHash: hash[:7],
+ Message: msg,
+ HasCI: jciSet[hash],
+ CIPushed: remoteCI[hash],
+ }
+
+ if commit.HasCI {
+ commit.CIStatus = getCIStatus(hash)
+ }
+
+ commits = append(commits, commit)
+ }
+
+ return commits, nil
+}
+
+// getCIStatus returns "success" or "failed" based on CI results
+func getCIStatus(commit string) string {
+ // Try to read the index.html and look for status
+ ref := "refs/jci/" + commit
+ cmd := exec.Command("git", "show", ref+":index.html")
+ out, err := cmd.Output()
+ if err != nil {
+ return ""
+ }
+
+ content := string(out)
+ if strings.Contains(content, "class=\"status success\"") {
+ return "success"
+ }
+ if strings.Contains(content, "class=\"status failed\"") {
+ return "failed"
+ }
+ return ""
+}
+
+// CommitDetail holds detailed commit info for the API
+type CommitDetail struct {
+ Hash string `json:"hash"`
+ Author string `json:"author"`
+ Date string `json:"date"`
+ Status string `json:"status"`
+ Files []string `json:"files"`
+}
+
+// serveCommitAPI returns commit details and file list
+func serveCommitAPI(w http.ResponseWriter, commit string) {
+ ref := "refs/jci/" + commit
+ if !RefExists(ref) {
+ http.Error(w, "not found", 404)
+ return
+ }
+
+ // Get commit info
+ author, _ := git("log", "-1", "--format=%an", commit)
+ date, _ := git("log", "-1", "--format=%cr", commit)
+ status := getCIStatus(commit)
+
+ // List files in the CI ref
+ filesOut, err := git("ls-tree", "--name-only", ref)
+ var files []string
+ if err == nil && filesOut != "" {
+ for _, f := range strings.Split(filesOut, "\n") {
+ if f != "" && f != "index.html" {
+ files = append(files, f)
+ }
+ }
+ }
+
+ detail := CommitDetail{
+ Hash: commit,
+ Author: author,
+ Date: date,
+ Status: status,
+ Files: files,
+ }
+
+ w.Header().Set("Content-Type", "application/json")
+ json.NewEncoder(w).Encode(detail)
+}
+
+// serveBranchesAPI returns branch/commit data as JSON
+func serveBranchesAPI(w http.ResponseWriter) {
+ branches, err := getLocalBranches()
+ if err != nil {
+ http.Error(w, err.Error(), 500)
+ return
+ }
+
+ var branchInfos []BranchInfo
+ for _, branch := range branches {
+ commits, err := getBranchCommits(branch, 20)
+ if err != nil {
+ continue
+ }
+ branchInfos = append(branchInfos, BranchInfo{
+ Name: branch,
+ IsRemote: false,
+ Commits: commits,
+ })
+ }
+
+ w.Header().Set("Content-Type", "application/json")
+ json.NewEncoder(w).Encode(branchInfos)
+}
+
+func showMainPage(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "text/html")
+ fmt.Fprint(w, `<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>JCI</title>
+ <style>
+ * { box-sizing: border-box; margin: 0; padding: 0; }
+ body {
+ font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, monospace;
+ font-size: 12px;
+ background: #1a1a1a;
+ color: #e0e0e0;
+ display: flex;
+ height: 100vh;
+ overflow: hidden;
+ }
+ a { color: #58a6ff; text-decoration: none; }
+ a:hover { text-decoration: underline; }
+
+ /* Left panel - commits */
+ .commits-panel {
+ width: 240px;
+ background: #1e1e1e;
+ border-right: 1px solid #333;
+ display: flex;
+ flex-direction: column;
+ flex-shrink: 0;
+ }
+ .panel-header {
+ padding: 6px 8px;
+ background: #252525;
+ border-bottom: 1px solid #333;
+ display: flex;
+ align-items: center;
+ gap: 6px;
+ }
+ .panel-header h1 { font-size: 12px; font-weight: 600; color: #888; }
+ .branch-selector {
+ flex: 1;
+ padding: 2px 4px;
+ font-size: 11px;
+ border: 1px solid #444;
+ border-radius: 3px;
+ background: #2a2a2a;
+ color: #fff;
+ }
+ .commit-list {
+ list-style: none;
+ overflow-y: auto;
+ flex: 1;
+ }
+ .commit-item {
+ padding: 3px 6px;
+ cursor: pointer;
+ display: flex;
+ align-items: center;
+ gap: 4px;
+ border-bottom: 1px solid #252525;
+ }
+ .commit-item:hover { background: #2a2a2a; }
+ .commit-item.selected { background: #2d4a3e; }
+ .commit-item.no-ci { opacity: 0.5; }
+ .ci-dot { width: 6px; height: 6px; border-radius: 50%; flex-shrink: 0; }
+ .ci-dot.success { background: #3fb950; }
+ .ci-dot.failed { background: #f85149; }
+ .ci-dot.none { background: #484f58; }
+ .commit-hash { font-size: 10px; color: #58a6ff; flex-shrink: 0; }
+ .commit-msg { flex: 1; white-space: nowrap; overflow: hidden; text-overflow: ellipsis; color: #888; font-size: 11px; }
+ .ci-push-badge { font-size: 8px; color: #666; }
+ .ci-push-badge.pushed { color: #3fb950; }
+
+ /* Middle panel - files */
+ .files-panel {
+ width: 180px;
+ background: #1e1e1e;
+ border-right: 1px solid #333;
+ display: flex;
+ flex-direction: column;
+ flex-shrink: 0;
+ }
+ .files-panel.hidden { display: none; }
+ .commit-info {
+ padding: 6px 8px;
+ background: #252525;
+ border-bottom: 1px solid #333;
+ font-size: 11px;
+ }
+ .commit-info .status { font-weight: 600; }
+ .commit-info .status.success { color: #3fb950; }
+ .commit-info .status.failed { color: #f85149; }
+ .commit-info .hash { color: #58a6ff; }
+ .commit-info .meta { color: #666; margin-top: 2px; }
+ .file-list {
+ list-style: none;
+ overflow-y: auto;
+ flex: 1;
+ }
+ .file-item {
+ padding: 3px 8px;
+ cursor: pointer;
+ border-bottom: 1px solid #252525;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ font-size: 11px;
+ }
+ .file-item:hover { background: #2a2a2a; }
+ .file-item.selected { background: #2d4a3e; }
+
+ /* Right panel - content */
+ .content-panel {
+ flex: 1;
+ display: flex;
+ flex-direction: column;
+ min-width: 0;
+ background: #1a1a1a;
+ }
+ .content-header {
+ padding: 4px 8px;
+ background: #252525;
+ border-bottom: 1px solid #333;
+ font-size: 11px;
+ color: #888;
+ }
+ .content-body {
+ flex: 1;
+ overflow: auto;
+ }
+ .content-body pre {
+ padding: 8px;
+ font-family: "Monaco", "Menlo", monospace;
+ font-size: 11px;
+ line-height: 1.4;
+ white-space: pre-wrap;
+ word-wrap: break-word;
+ }
+ .content-body iframe {
+ width: 100%;
+ height: 100%;
+ border: none;
+ background: #fff;
+ }
+ .empty-state {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ height: 100%;
+ color: #666;
+ }
+
+ @media (max-width: 700px) {
+ .commits-panel { width: 180px; }
+ .files-panel { width: 140px; }
+ }
+ </style>
+</head>
+<body>
+ <div class="commits-panel">
+ <div class="panel-header">
+ <h1>JCI</h1>
+ <select class="branch-selector" id="branchSelect"></select>
+ </div>
+ <ul class="commit-list" id="commitList"></ul>
+ </div>
+ <div class="files-panel hidden" id="filesPanel">
+ <div class="commit-info" id="commitInfo"></div>
+ <ul class="file-list" id="fileList"></ul>
+ </div>
+ <div class="content-panel">
+ <div class="content-header" id="contentHeader"></div>
+ <div class="content-body" id="contentBody">
+ <div class="empty-state">Select a commit</div>
+ </div>
+ </div>
+
+ <script>
+ let branches = [], currentCommit = null, currentFiles = [], currentFile = null;
+
+ async function loadBranches() {
+ const res = await fetch('/api/branches');
+ branches = await res.json() || [];
+ const select = document.getElementById('branchSelect');
+ select.innerHTML = branches.map(b => '<option value="' + b.name + '">' + b.name + '</option>').join('');
+ const def = branches.find(b => b.name === 'main') || branches[0];
+ if (def) { select.value = def.name; showBranch(def.name); }
+
+ // Check URL for initial commit
+ const m = location.pathname.match(/^\/jci\/([a-f0-9]+)/);
+ if (m) selectCommitByHash(m[1]);
+ }
+
+ function showBranch(name) {
+ const branch = branches.find(b => b.name === name);
+ if (!branch) return;
+ const list = document.getElementById('commitList');
+ list.innerHTML = (branch.commits || []).map(c => {
+ const status = c.hasCI ? c.ciStatus : 'none';
+ const pushIcon = c.hasCI ? (c.ciPushed ? '↑' : '○') : '';
+ const pushClass = c.ciPushed ? 'pushed' : '';
+ const noCiClass = c.hasCI ? '' : 'no-ci';
+ return '<li class="commit-item ' + noCiClass + '" data-hash="' + c.hash + '" data-hasci="' + c.hasCI + '">' +
+ '<span class="ci-dot ' + status + '"></span>' +
+ '<span class="commit-hash">' + c.shortHash + '</span>' +
+ '<span class="commit-msg">' + escapeHtml(c.message) + '</span>' +
+ '<span class="ci-push-badge ' + pushClass + '">' + pushIcon + '</span></li>';
+ }).join('');
+ list.querySelectorAll('.commit-item').forEach(el => {
+ el.onclick = () => selectCommit(el.dataset.hash, el.dataset.hasci === 'true');
+ });
+ }
+
+ function selectCommitByHash(hash) {
+ // Find full hash from branches
+ for (const b of branches) {
+ const c = (b.commits || []).find(c => c.hash.startsWith(hash));
+ if (c) { selectCommit(c.hash, c.hasCI); return; }
+ }
+ }
+
+ async function selectCommit(hash, hasCI) {
+ currentCommit = hash;
+ document.querySelectorAll('.commit-item').forEach(el =>
+ el.classList.toggle('selected', el.dataset.hash === hash)
+ );
+
+ const filesPanel = document.getElementById('filesPanel');
+ const contentBody = document.getElementById('contentBody');
+ const contentHeader = document.getElementById('contentHeader');
+
+ if (!hasCI) {
+ filesPanel.classList.add('hidden');
+ contentHeader.textContent = '';
+ contentBody.innerHTML = '<div class="empty-state">No CI results. Run: git jci run</div>';
+ history.pushState(null, '', '/');
+ return;
+ }
+
+ filesPanel.classList.remove('hidden');
+ history.pushState(null, '', '/jci/' + hash + '/');
+
+ // Load commit info and files
+ try {
+ const infoRes = await fetch('/api/commit/' + hash);
+ const info = await infoRes.json();
+
+ document.getElementById('commitInfo').innerHTML =
+ '<div><span class="status ' + info.status + '">' + (info.status === 'success' ? '✓' : '✗') + '</span> ' +
+ '<span class="hash">' + hash.slice(0,7) + '</span></div>' +
+ '<div class="meta">' + escapeHtml(info.author) + ' · ' + escapeHtml(info.date) + '</div>';
+
+ currentFiles = info.files || [];
+ const fileList = document.getElementById('fileList');
+ fileList.innerHTML = currentFiles.map(f =>
+ '<li class="file-item" data-file="' + f + '">' + f + '</li>'
+ ).join('');
+ fileList.querySelectorAll('.file-item').forEach(el => {
+ el.onclick = () => loadFile(el.dataset.file);
+ });
+
+ // Load default file
+ const defaultFile = currentFiles.find(f => f === 'run.output.txt') || currentFiles[0];
+ if (defaultFile) loadFile(defaultFile);
+ } catch (e) {
+ contentBody.innerHTML = '<div class="empty-state">Failed to load</div>';
+ }
+ }
+
+ function loadFile(name) {
+ currentFile = name;
+ document.querySelectorAll('.file-item').forEach(el =>
+ el.classList.toggle('selected', el.dataset.file === name)
+ );
+
+ const contentHeader = document.getElementById('contentHeader');
+ const contentBody = document.getElementById('contentBody');
+ contentHeader.textContent = name;
+
+ history.pushState(null, '', '/jci/' + currentCommit + '/' + name);
+
+ const ext = name.split('.').pop().toLowerCase();
+ const textExts = ['txt', 'log', 'sh', 'go', 'py', 'js', 'json', 'yaml', 'yml', 'md', 'css', 'xml', 'toml', 'ini', 'conf'];
+ const url = '/jci/' + currentCommit + '/' + name;
+
+ if (ext === 'html' || ext === 'htm') {
+ contentBody.innerHTML = '<iframe src="' + url + '"></iframe>';
+ } else if (textExts.includes(ext) || !name.includes('.')) {
+ fetch(url).then(r => r.text()).then(text => {
+ contentBody.innerHTML = '<pre>' + escapeHtml(text) + '</pre>';
+ });
+ } else {
+ contentBody.innerHTML = '<div class="empty-state"><a href="' + url + '" download>Download ' + name + '</a></div>';
+ }
+ }
+
+ function escapeHtml(t) {
+ if (!t) return '';
+    return t.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;');
+ }
+
+ window.onpopstate = () => {
+ const m = location.pathname.match(/^\/jci\/([a-f0-9]+)(?:\/(.+))?/);
+ if (m) {
+ if (m[1] !== currentCommit) selectCommitByHash(m[1]);
+ else if (m[2] && m[2] !== currentFile) loadFile(m[2]);
+ }
+ };
+
+ document.getElementById('branchSelect').onchange = e => showBranch(e.target.value);
+ loadBranches();
+ </script>
+</body>
+</html>
+`)
+}
+
+func serveFromRef(w http.ResponseWriter, commit string, filePath string) {
+ ref := "refs/jci/" + commit
+ if !RefExists(ref) {
+ http.Error(w, "CI results not found for commit: "+commit, 404)
+ return
+ }
+
+ // Use git show to get file content from the ref
+ cmd := exec.Command("git", "show", ref+":"+filePath)
+ out, err := cmd.Output()
+ if err != nil {
+ http.Error(w, "File not found: "+filePath, 404)
+ return
+ }
+
+ // Set content type based on extension
+ ext := filepath.Ext(filePath)
+ switch ext {
+ case ".html":
+ w.Header().Set("Content-Type", "text/html")
+ case ".css":
+ w.Header().Set("Content-Type", "text/css")
+ case ".js":
+ w.Header().Set("Content-Type", "application/javascript")
+ case ".json":
+ w.Header().Set("Content-Type", "application/json")
+ case ".txt":
+ w.Header().Set("Content-Type", "text/plain")
+ default:
+ // Binary files (executables, etc.)
+ w.Header().Set("Content-Type", "application/octet-stream")
+ w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%q", filepath.Base(filePath)))
+ }
+
+ w.Write(out)
+}
+
+// extractRef extracts files from a ref to a temp directory (not used currently but useful)
+func extractRef(ref string) (string, error) {
+ tmpDir, err := os.MkdirTemp("", "jci-view-*")
+ if err != nil {
+ return "", err
+ }
+
+ cmd := exec.Command("git", "archive", ref)
+ tar := exec.Command("tar", "-xf", "-", "-C", tmpDir)
+
+ tar.Stdin, _ = cmd.StdoutPipe()
+ tar.Start()
+ cmd.Run()
+ tar.Wait()
+
+ return tmpDir, nil
+}
diff --git a/jaypore_ci/__init__.py b/jaypore_ci/__init__.py
diff --git a/jaypore_ci/__main__.py b/jaypore_ci/__main__.py
diff --git a/jaypore_ci/changelog.py b/jaypore_ci/changelog.py
@@ -1,84 +0,0 @@
-from jaypore_ci.config import Version
-
-V = Version.parse
-NEW = "🎁"
-CHANGE = "⚙️"
-BUGFIX = "🐞"
-
-version_map = {
- V("0.2.31"): {
- "changes": [
- (
- f"{NEW}: Old networks will also be removed automatically for "
- "jobs that are older than a week."
- ),
- ],
- "instructions": [],
- },
- V("0.2.30"): {
- "changes": [
- (
- f"{NEW}: You can pass arbitrary arguments to the `docker run` "
- "command simply by using the `executor_kwargs` argument while "
- "defining the job. Read more in `Passing extra_hosts and other "
- "arguments to docker`_."
- ),
- f"{NEW}: SSH remotes are now compatible with Jaypore CI.",
- ],
- "instructions": [],
- },
- V("0.2.29"): {
- "changes": [
- (
- f"{BUGFIX}: When gitea token does not have enough scope log"
- " correctly and exit"
- )
- ],
- "instructions": [],
- },
- V("0.2.28"): {
- "changes": [
- (
- f"{BUGFIX}: When there are multiple (push) remotes, Jaypore CI"
- " will pick the first one and use that."
- )
- ],
- "instructions": [],
- },
- V("0.2.27"): {
- "changes": [
- (
- f"{NEW}: Jobs older than 1 week will be removed before starting"
- " a new pipeline."
- )
- ],
- "instructions": [],
- },
- V("0.2.26"): {
- "changes": [
- (
- f"{CHANGE}: The Dockerfile inside `cicd/Dockerfile` now"
- " requires a build arg that specifies the version of Jaypore CI"
- " to install."
- ),
- ],
- "instructions": [
- "Please run the Jaypore CI setup once again.",
- ],
- },
- V("0.2.25"): {
- "changes": [
- (
- f"{NEW}: A dockerfile is now used to send context of the"
- " codebase to the docker daemon instead of directly mounting the"
- " code. This allows us to easily use remote systems for jobs"
- )
- ],
- "instructions": [],
- },
-}
-assert all(
- line.startswith(NEW) or line.startswith(CHANGE) or line.startswith(BUGFIX)
- for log in version_map.values()
- for line in log["changes"]
-), "All change lines must start with one of NEW/CHANGE/BUGFIX"
diff --git a/jaypore_ci/clean.py b/jaypore_ci/clean.py
@@ -1,9 +0,0 @@
-allowed_alphabet = "abcdefghijklmnopqrstuvwxyz1234567890"
-allowed_alphabet += allowed_alphabet.upper()
-
-
-def name(given):
- """
- Clean a given name so that it can be used inside of JCI.
- """
- return "".join(l if l in allowed_alphabet else "-" for l in given)
diff --git a/jaypore_ci/config.py b/jaypore_ci/config.py
@@ -1,59 +0,0 @@
-import os
-import tomllib
-from typing import NamedTuple
-import importlib.metadata
-from pathlib import Path
-
-
-class Version(NamedTuple):
- major: int
- minor: int
- patch: int
- trail: str = None
-
- def __repr__(self):
- if self.trail:
- return f"{self.major}.{self.minor}.{self.patch}-{self.trail}"
- return f"{self.major}.{self.minor}.{self.patch}"
-
- def __str__(self):
- return self.__repr__()
-
- @classmethod
- def parse(cls, inp: str) -> "Version":
- if inp is None or inp == "":
- return None
- trail = None
- major, minor, patch = inp.split(".")
- major = major[1:] if major[0].lower() == "v" else major
- assert major.isdigit()
- assert minor.isdigit()
- if "-" in patch:
- patch, trail = patch.split("-", 1)
- assert patch.isdigit()
- return cls(major=int(major), minor=int(minor), patch=int(patch), trail=trail)
-
-
-def get_version() -> Version:
- try:
- return Version.parse(importlib.metadata.version(__package__ or __name__))
- except importlib.metadata.PackageNotFoundError:
- try:
- with open(
- (Path(__file__) / "../../pyproject.toml").resolve(),
- "rb",
- ) as fl:
- data = tomllib.load(fl)
- return Version.parse(data["tool"]["poetry"]["version"])
- except FileNotFoundError:
- return None
-
-
-class Const(NamedTuple):
- expected_version: Version = Version.parse(
- os.environ.get("EXPECTED_JAYPORECI_VERSION")
- )
- version: Version = get_version()
-
-
-const = Const()
diff --git a/jaypore_ci/exceptions.py b/jaypore_ci/exceptions.py
@@ -1,5 +0,0 @@
-class BadConfig(Exception):
- """
- Raised when a given configuration for a pipeline will cause errors /
- unexpected behaviour if it is allowed to run.
- """
diff --git a/jaypore_ci/executors/__init__.py b/jaypore_ci/executors/__init__.py
@@ -1 +0,0 @@
-from .docker import Docker
diff --git a/jaypore_ci/executors/docker.py b/jaypore_ci/executors/docker.py
@@ -1,223 +0,0 @@
-"""
-A docker executor for Jaypore CI.
-"""
-from copy import deepcopy
-
-import pendulum
-import docker
-from rich import print as rprint
-from tqdm import tqdm
-
-from jaypore_ci import clean
-from jaypore_ci.interfaces import Executor, TriggerFailed, JobStatus
-from jaypore_ci.logging import logger
-
-
-class Docker(Executor):
- """
- Run jobs via docker. To communicate with docker we use the `Python docker
- sdk <https://docker-py.readthedocs.io/en/stable/client.html>`_.
-
- Using this executor will:
- - Create a separate network for each run
- - Run jobs as part of the network
- - Clean up all jobs when the pipeline exits.
- """
-
- def __init__(self):
- super().__init__()
- self.pipe_id = None
- self.pipeline = None
- self.docker = docker.from_env()
- self.client = docker.APIClient()
- self.__execution_order__ = []
-
- def logging(self):
- """
- Returns a logging instance that has executor specific
- information bound to it.
- """
- return logger.bind(pipe_id=self.pipe_id, network_name=self.get_net())
-
- def set_pipeline(self, pipeline):
- """
- Set executor's pipeline to the given one.
-
- This will clean up old networks and create new ones.
- """
- if self.pipe_id is not None:
- self.delete_network()
- self.delete_all_jobs()
- self.pipe_id = pipeline.pipe_id
- self.pipeline = pipeline
- self.create_network()
-
- def teardown(self):
- self.delete_network()
- self.delete_all_jobs()
-
- def setup(self):
- self.delete_old_containers()
-
- def delete_old_containers(self):
- a_week_back = pendulum.now().subtract(days=7)
- pipe_ids_removed = set()
- for container in tqdm(
- self.docker.containers.list(filters={"status": "exited"}),
- desc="Removing jobs older than a week",
- ):
- if "jayporeci_" not in container.name:
- continue
- if "__job__" in container.name:
- pipe_ids_removed.add(
- container.name.split("__job__")[1].split("__", 1)[0]
- )
- finished_at = pendulum.parse(container.attrs["State"]["FinishedAt"])
- if finished_at <= a_week_back:
- container.remove(v=True)
- for network in tqdm(
- self.docker.networks.list(
- names=[self.get_net(pipe_id=pipe_id) for pipe_id in pipe_ids_removed]
- ),
- desc="Removing related networks",
- ):
- network.remove()
-
- def get_net(self, *, pipe_id=None):
- """
- Return a network name based on what the curent pipeline is.
- """
- pipe_id = pipe_id if pipe_id is not None else self.pipe_id
- return f"jayporeci__net__{pipe_id}" if pipe_id is not None else None
-
- def create_network(self):
- """
- Will create a docker network.
-
- If it fails to do so in 3 attempts it will raise an
- exception and fail.
- """
- assert self.pipe_id is not None, "Cannot create network if pipe is not set"
- for _ in range(3):
- if len(self.docker.networks.list(names=[self.get_net()])) != 0:
- self.logging().info("Found network", network_name=self.get_net())
- return
- self.logging().info(
- "Create network",
- subprocess=self.docker.networks.create(
- name=self.get_net(), driver="bridge"
- ),
- )
- raise TriggerFailed("Cannot create network")
-
- def delete_all_jobs(self):
- """
- Deletes all jobs associated with the pipeline for this
- executor.
-
- It will stop any jobs that are still running.
- """
- assert self.pipe_id is not None, "Cannot delete jobs if pipe is not set"
- job = None
- for job in self.pipeline.jobs.values():
- if job.run_id is not None and not job.run_id.startswith("pyrun_"):
- container = self.docker.containers.get(job.run_id)
- container.stop(timeout=1)
- self.logging().info("Stop job:", run_id=job.run_id)
- job.check_job(with_update_report=False)
- if job is not None:
- job.check_job()
- self.logging().info("All jobs stopped")
-
- def delete_network(self):
- """
- Delete the network for this executor.
- """
- assert self.pipe_id is not None, "Cannot delete network if pipe is not set"
- try:
- net = self.docker.networks.get(self.get_net())
- net.remove()
- except docker.errors.NotFound:
- self.logging().error("Delete network: Not found", netid=self.get_net())
-
- def get_job_name(self, job, tail=False):
- """
- Generates a clean job name slug.
- """
- name = clean.name(job.name)
- if tail:
- return name
- return f"jayporeci__job__{self.pipe_id}__{name}"
-
- def run(self, job: "Job") -> str:
- """
- Run the given job and return a docker container ID.
- In case something goes wrong it will raise TriggerFailed
- """
- assert self.pipe_id is not None, "Cannot run job if pipe id is not set"
- ex_kwargs = deepcopy(job.executor_kwargs)
- env = job.get_env()
- env.update(ex_kwargs.pop("environment", {}))
- trigger = {
- "detach": True,
- "environment": env,
- "volumes": list(
- set(
- [
- "/var/run/docker.sock:/var/run/docker.sock",
- "/usr/bin/docker:/usr/bin/docker:ro",
- "/tmp/jayporeci__cidfiles:/jaypore_ci/cidfiles:ro",
- f"/tmp/jayporeci__src__{self.pipeline.remote.sha}:/jaypore_ci/run",
- ]
- + (ex_kwargs.pop("volumes", []))
- )
- ),
- "name": self.get_job_name(job),
- "network": self.get_net(),
- "image": job.image,
- "command": job.command if not job.is_service else None,
- }
- for key, value in ex_kwargs.items():
- if key in trigger:
- self.logging().warning(
- f"Overwriting existing value of `{key}` for job trigger.",
- old_value=trigger[key],
- new_value=value,
- )
- trigger[key] = value
- if not job.is_service:
- trigger["working_dir"] = "/jaypore_ci/run"
- if not job.is_service:
- assert job.command
- rprint(trigger)
- try:
- container = self.docker.containers.run(**trigger)
- self.__execution_order__.append(
- (self.get_job_name(job, tail=True), container.id, "Run")
- )
- return container.id
- except docker.errors.APIError as e:
- self.logging().exception(e)
- raise TriggerFailed(e) from e
-
- def get_status(self, run_id: str) -> JobStatus:
- """
- Given a run_id, it will get the status for that run.
- """
- inspect = self.client.inspect_container(run_id)
- status = JobStatus(
- is_running=inspect["State"]["Running"],
- exit_code=int(inspect["State"]["ExitCode"]),
- logs="",
- started_at=pendulum.parse(inspect["State"]["StartedAt"]),
- finished_at=pendulum.parse(inspect["State"]["FinishedAt"])
- if inspect["State"]["FinishedAt"] != "0001-01-01T00:00:00Z"
- else None,
- )
- # --- logs
- self.logging().debug("Check status", status=status)
- logs = self.docker.containers.get(run_id).logs().decode()
- return status._replace(logs=logs)
-
- def get_execution_order(self):
- return {name: i for i, (name, *_) in enumerate(self.__execution_order__)}
diff --git a/jaypore_ci/interfaces.py b/jaypore_ci/interfaces.py
@@ -1,202 +0,0 @@
-"""
-Defines interfaces for remotes and executors.
-
-Currently only gitea and docker are supported as remote and executor
-respectively.
-"""
-from enum import Enum
-from pathlib import Path
-from urllib.parse import urlparse
-from typing import NamedTuple, List
-
-
-class TriggerFailed(Exception):
- "Failure to trigger a job"
-
-
-class RemoteApiFailed(Exception):
- "Failure while working with a remote"
-
-
-class JobStatus(NamedTuple):
- is_running: bool
- exit_code: int
- logs: str
- started_at: str
- finished_at: str
-
-
-class Status(Enum):
- "Each pipeline can ONLY be in any one of these statuses"
- PENDING = 10
- RUNNING = 30
- FAILED = 40
- PASSED = 50
- TIMEOUT = 60
- SKIPPED = 70
-
-
-class RemoteInfo(NamedTuple):
- """
- Holds information about the remote irrespective of if the remote was ssh or
- https.
- """
-
- netloc: str
- owner: str
- repo: str
- original: str
-
- @classmethod
- def parse(cls, remote: str) -> "RemoteInfo":
- """
- Given a git remote url string, parses and breaks down information
- contained in the url.
-
- Works with the following formats:
-
- ssh://git@gitea.arjoonn.com:arjoonn/jaypore_ci.git
- ssh+git://git@gitea.arjoonn.com:arjoonn/jaypore_ci.git
-
- git@gitea.arjoonn.com:arjoonn/jaypore_ci.git
- git@gitea.arjoonn.com:arjoonn/jaypore_ci.git
-
- https://gitea.arjoonn.com/midpath/jaypore_ci.git
- http://gitea.arjoonn.com/midpath/jaypore_ci.git
- """
- original = remote
- if (
- ("ssh://" in remote or "ssh+git://" in remote or "://" not in remote)
- and "@" in remote
- and remote.endswith(".git")
- ):
- _, remote = remote.split("@")
- netloc, path = remote.split(":")
- owner, repo = path.split("/")
- return RemoteInfo(
- netloc=netloc,
- owner=owner,
- repo=repo.replace(".git", ""),
- original=original,
- )
- url = urlparse(remote)
- return RemoteInfo(
- netloc=url.netloc,
- owner=Path(url.path).parts[1],
- repo=Path(url.path).parts[2].replace(".git", ""),
- original=original,
- )
-
-
-class Repo:
- """
- Contains information about the current VCS repo.
- """
-
- def __init__(self, sha: str, branch: str, remote: str, commit_message: str):
- self.sha: str = sha
- self.branch: str = branch
- self.remote: str = remote
- self.commit_message: str = commit_message
-
- def files_changed(self, target: str) -> List[str]:
- """
- Returns list of file paths that have changed between current sha and
- target.
- """
- raise NotImplementedError()
-
- @classmethod
- def from_env(cls) -> "Repo":
- """
- Creates a :class:`~jaypore_ci.interfaces.Repo` instance
- from the environment and git repo on disk.
- """
- raise NotImplementedError()
-
-
-class Executor:
- """
- An executor is something used to run a job.
- It could be docker / podman / shell etc.
- """
-
- def run(self, job: "Job") -> str:
- "Run a job and return it's ID"
- raise NotImplementedError()
-
- def __init__(self):
- self.pipe_id = None
- self.pipeline = None
-
- def set_pipeline(self, pipeline: "Pipeline") -> None:
- """Set the current pipeline to the given one."""
- self.pipe_id = id(pipeline)
- self.pipeline = pipeline
-
- def setup(self) -> None:
- """
- This function is meant to perform any work that should be done before
- running any jobs.
- """
-
- def teardown(self) -> None:
- """
- On exit the executor must clean up any pending / stuck / zombie jobs that are still there.
- """
-
- def get_status(self, run_id: str) -> JobStatus:
- """
- Returns the status of a given run.
- """
- raise NotImplementedError()
-
-
-class Remote:
- """
- Something that allows us to show other people the status of the CI job.
- It could be gitea / github / gitlab / email system.
- """
-
- def __init__(self, *, sha, branch):
- self.sha = sha
- self.branch = branch
-
- def publish(self, report: str, status: str):
- """
- Publish this report somewhere.
- """
- raise NotImplementedError()
-
- def setup(self) -> None:
- """
- This function is meant to perform any work that should be done before
- running any jobs.
- """
-
- def teardown(self) -> None:
- """
- This function will be called once the pipeline is finished.
- """
-
- @classmethod
- def from_env(cls, *, repo: "Repo"):
- """
- This function should create a Remote instance from the given environment.
- It can read git information / look at environment variables etc.
- """
- raise NotImplementedError()
-
-
-class Reporter:
- """
- Something that generates the status of a pipeline.
-
- It can be used to generate reports in markdown, plaintext, html, pdf etc.
- """
-
- def render(self, pipeline: "Pipeline") -> str:
- """
- Render a report for the pipeline.
- """
- raise NotImplementedError()
diff --git a/jaypore_ci/jci.py b/jaypore_ci/jci.py
@@ -1,511 +0,0 @@
-"""
-The code submodule for Jaypore CI.
-"""
-import time
-import os
-from itertools import product
-from collections import defaultdict
-from typing import List, Union, Callable
-from contextlib import contextmanager
-
-import structlog
-import pendulum
-
-from jaypore_ci.exceptions import BadConfig
-from jaypore_ci.config import const
-from jaypore_ci.changelog import version_map
-from jaypore_ci import remotes, executors, reporters, repos, clean
-from jaypore_ci.interfaces import (
- Remote,
- Executor,
- Reporter,
- TriggerFailed,
- Status,
- Repo,
-)
-from jaypore_ci.logging import logger
-
-TZ = "UTC"
-
-__all__ = ["Pipeline", "Job"]
-
-
-# All of these statuses are considered "finished" statuses
-FIN_STATUSES = (Status.FAILED, Status.PASSED, Status.TIMEOUT, Status.SKIPPED)
-PREFIX = "JAYPORE_"
-
-# Check if we need to upgrade Jaypore CI
-def ensure_version_is_correct() -> None:
- """
- Ensure that the version of Jaypore CI that is running, the code inside
- cicd.py, and pre-push.sh are at compatible versions.
-
- If versions do not match then this function will print out instructions on
- what to do in order to upgrade.
-
- Downgrades are not allowed, you need to re-install that specific version.
- """
- if (
- const.expected_version is not None
- and const.version is not None
- and const.expected_version != const.version
- ):
- print("Expected : ", const.expected_version)
- print("Got : ", const.version)
- if const.version > const.expected_version:
- print(
- "Your current version is higher than the expected one. Please "
- "re-install Jaypore CI in this repo as downgrades are not "
- "supported."
- )
- if const.version < const.expected_version:
- print("--- Upgrade Instructions ---")
- for version in sorted(version_map.keys()):
- if version < const.version or version > const.expected_version:
- continue
- for line in version_map[version]["instructions"]:
- print(line)
- print("--- -------------------- ---")
- raise BadConfig(
- "Version mismatch between arjoonn/jci:<tag> docker container and pre-push.sh script"
- )
-
-
-class Job: # pylint: disable=too-many-instance-attributes
- """
- This is the fundamental building block for running jobs.
- Each job goes through a lifecycle defined by
- :class:`~jaypore_ci.interfaces.Status`.
-
- A job is run by an :class:`~jaypore_ci.interfaces.Executor` as part of a
- :class:`~jaypore_ci.jci.Pipeline`.
-
- It is never created manually. The correct way to create a job is to use
- :meth:`~jaypore_ci.jci.Pipeline.job`.
-
- :param name: The name for the job. Names must be unique across
- jobs and stages.
- :param command: The command that we need to run for the job. It can
- be set to `None` when `is_service` is True.
- :param is_service: Is this job a service or not? Service jobs are
- assumed to be
- :class:`~jaypore_ci.interfaces.Status.PASSED` as
- long as they start. They are shut down when the
- entire pipeline has finished executing.
- :param pipeline: The pipeline this job is associated with.
- :param status: The :class:`~jaypore_ci.interfaces.Status` of this job.
- :param image: What docker image to use for this job.
- :param timeout: Defines how long a job is allowed to run before being
- killed and marked as
- class:`~jaypore_ci.interfaces.Status.FAILED`.
- :param env: A dictionary of environment variables to pass to
- the docker run command.
- :param children: Defines which jobs depend on this job's output
- status.
- :param parents: Defines which jobs need to pass before this job can
- be run.
- :param stage: What stage the job belongs to. This stage name must
- exist so that we can assign jobs to it.
- :param executor_kwargs: A dictionary of keyword arguments that the executor
- can use when running a job. Different executors may
- use this in different ways, for example with the
- :class:`~jaypore_ci.executors.docker.Docker`
- executor this may be used to run jobs with
- `--add-host or --device
- <https://docker-py.readthedocs.io/en/stable/containers.html#docker.models.containers.ContainerCollection.run>`_
- .
- """
-
- def __init__(
- self,
- name: str,
- command: Union[str, Callable],
- pipeline: "Pipeline",
- *,
- status: str = None,
- children: List["Job"] = None,
- parents: List["Job"] = None,
- is_service: bool = False,
- stage: str = None,
- # --- executor kwargs
- image: str = None,
- timeout: int = None,
- env: dict = None,
- executor_kwargs: dict = None,
- ):
- self.name = name
- self.command = command
- self.image = image
- self.status = status
- self.run_state = None
- self.timeout = timeout
- self.pipeline = pipeline
- self.env = env
- self.children = children if children is not None else []
- self.parents = parents if parents is not None else []
- self.is_service = is_service
- self.stage = stage
- self.executor_kwargs = executor_kwargs if executor_kwargs is not None else {}
- # --- run information
- self.logs = defaultdict(list)
- self.job_id = id(self)
- self.run_id = None
- self.run_start = None
- self.last_check = None
-
- def logging(self):
- """
- Returns a logging instance that has job specific information bound to
- it.
- """
- return self.pipeline.logging().bind(
- job_id=self.job_id,
- job_name=self.name,
- run_id=self.run_id,
- )
-
- def update_report(self) -> str:
- """
- Update the status report. Usually called when a job changes some of
- it's internal state like when logs are updated or when status has
- changed.
- """
- self.logging().debug("Update report")
- status = {
- Status.PENDING: "pending",
- Status.RUNNING: "pending",
- Status.FAILED: "failure",
- Status.PASSED: "success",
- Status.TIMEOUT: "warning",
- Status.SKIPPED: "warning",
- }[self.pipeline.get_status()]
- report = self.pipeline.reporter.render(self.pipeline)
- with open("/jaypore_ci/run/jaypore_ci.status.txt", "w", encoding="utf-8") as fl:
- fl.write(report)
- self.pipeline.remote.publish(report, status)
- return report
-
- def trigger(self):
- """
- Trigger the job via the pipeline's executor.
- This will immediately return and will not wait for the job to finish.
-
- It is also idempotent. Calling this multiple times will only trigger
- the job once.
- """
- if self.status == Status.PENDING:
- self.run_start = pendulum.now(TZ)
- self.logging().info("Trigger called")
- self.status = Status.RUNNING
- if isinstance(self.command, str):
- try:
- self.run_id = self.pipeline.executor.run(self)
- self.logging().info("Trigger done")
- except TriggerFailed as e:
- self.logging().error(
- "Trigger failed",
- error=e,
- job_name=self.name,
- )
- self.status = Status.FAILED
- else:
- self.logging().info("Trigger called but job already running")
- self.check_job()
-
- def check_job(self, *, with_update_report=True):
- """
- This will check the status of the job.
- If `with_update_report` is False, it will not push an update to the remote.
- """
- if isinstance(self.command, str) and self.run_id is not None:
- self.logging().debug("Checking job run")
- self.run_state = self.pipeline.executor.get_status(self.run_id)
- self.last_check = pendulum.now(TZ)
- self.logging().debug(
- "Job run status found",
- is_running=self.run_state.is_running,
- exit_code=self.run_state.exit_code,
- )
- if self.run_state.is_running:
- self.status = Status.RUNNING if not self.is_service else Status.PASSED
- else:
- self.status = (
- Status.PASSED if self.run_state.exit_code == 0 else Status.FAILED
- )
- self.logs["stdout"] = reporters.clean_logs(self.run_state.logs)
- if with_update_report:
- self.update_report()
-
- def is_complete(self) -> bool:
- """
- Is this job complete? It could have passed/ failed etc.
- We no longer need to check for updates in a complete job.
- """
- return self.status in FIN_STATUSES
-
- def get_env(self):
- """
- Gets the environment variables for a given job.
- Order of precedence for setting values is:
-
- 1. Pipeline
- 2. Stage
- 3. Job
- """
- env = {
- k[len(PREFIX) :]: v for k, v in os.environ.items() if k.startswith(PREFIX)
- }
- env.update(self.pipeline.pipe_kwargs.get("env", {}))
- env.update(self.env) # Includes env specified in stage kwargs AND job kwargs
- return env
-
-
-class Pipeline: # pylint: disable=too-many-instance-attributes
- """
- A pipeline acts as a controlling/organizing mechanism for multiple jobs.
-
- :param repo: Provides information about the codebase.
- :param reporter: Provides reports based on the state of the pipeline.
- :param remote: Allows us to publish reports to somewhere like gitea/email.
- :param executor: Runs the specified jobs.
- :param poll_interval: Defines how frequently (in seconds) to check the
- pipeline status and publish a report.
- """
-
- # We need a way to avoid actually running the examples. Something like a
- # "dry-run" option so that only the building of the config is done and it's
- # never actually run. It might be a good idea to make this an actual config
- # variable but I'm not sure if we should do that or not.
- __run_on_exit__ = True
-
- def __init__( # pylint: disable=too-many-arguments
- self,
- *,
- repo: Repo = None,
- remote: Remote = None,
- executor: Executor = None,
- reporter: Reporter = None,
- poll_interval: int = 10,
- **kwargs,
- ) -> "Pipeline":
- self.jobs = {}
- self.services = []
- self.should_pass_called = set()
- self.repo = repo if repo is not None else repos.Git.from_env()
- self.remote = (
- remote
- if remote is not None
- else remotes.gitea.Gitea.from_env(repo=self.repo)
- )
- self.executor = executor if executor is not None else executors.docker.Docker()
- self.reporter = reporter if reporter is not None else reporters.text.Text()
- self.poll_interval = poll_interval
- self.stages = ["Pipeline"]
- self.__pipe_id__ = None
- self.executor.set_pipeline(self)
- # ---
- kwargs["image"] = kwargs.get("image", "arjoonn/jci")
- kwargs["timeout"] = kwargs.get("timeout", 15 * 60)
- kwargs["env"] = kwargs.get("env", {})
- kwargs["stage"] = "Pipeline"
- self.pipe_kwargs = kwargs
- self.stage_kwargs = None
-
- @property
- def pipe_id(self):
- if self.__pipe_id__ is None:
- self.__pipe_id__ = self.__get_pipe_id__()
- return self.__pipe_id__
-
- def __get_pipe_id__(self):
- """
- This is mainly here so that during testing we can override this and
- provide a different way to get the pipe id
- """
- with open(f"/jaypore_ci/cidfiles/{self.repo.sha}", "r", encoding="utf-8") as fl:
- return fl.read().strip()
-
- def logging(self):
- """
- Return a logger with information about the current pipeline bound to
- it.
- """
- return logger.bind(
- **{
- **structlog.get_context(self.remote.logging()),
- **structlog.get_context(self.executor.logging()),
- "pipe_id": id(self),
- }
- )
-
- def __enter__(self):
- ensure_version_is_correct()
- self.executor.setup()
- self.remote.setup()
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- if Pipeline.__run_on_exit__:
- self.run()
- self.executor.teardown()
- self.remote.teardown()
- return False
-
- def get_status(self) -> Status:
- """
- Calculates a pipeline's status based on the status of it's jobs.
- """
- for job in self.jobs.values():
- if job.status == Status.RUNNING:
- return Status.RUNNING
- service = None
- for service in self.services:
- service.check_job(with_update_report=False)
- if service is not None:
- service.check_job(with_update_report=False)
- has_pending = False
- for job in self.jobs.values():
- job.check_job(with_update_report=False)
- if not job.is_complete():
- has_pending = True
- else:
- if job.status != Status.PASSED:
- return Status.FAILED
- return Status.PENDING if has_pending else Status.PASSED
-
- def get_status_dot(self) -> str:
- """
- Get's the status dot for the pipeline.
- """
- if self.get_status() == Status.PASSED:
- return "🟢"
- if self.get_status() == Status.FAILED:
- return "🔴"
- if self.get_status() == Status.SKIPPED:
- return "🔵"
- return "🟡"
-
- def job(
- self,
- name: str,
- command: str,
- *,
- depends_on: List[str] = None,
- **kwargs,
- ) -> Job:
- """
- Creates a :class:`~jaypore_ci.jci.Job` instance based on the
- pipeline/stage that it is being defined in. See
- :class:`~jaypore_ci.jci.Job` for details on what parameters can be
- passed to the job.
- """
- depends_on = [] if depends_on is None else depends_on
- depends_on = [depends_on] if isinstance(depends_on, str) else depends_on
- name = clean.name(name)
- assert name, "Name should have some value after it is cleaned"
- assert name not in self.jobs, f"{name} already defined"
- assert name not in self.stages, "Stage name cannot match a job's name"
- kwargs, job_kwargs = dict(self.pipe_kwargs), kwargs
- kwargs.update(self.stage_kwargs if self.stage_kwargs is not None else {})
- kwargs.update(job_kwargs)
- if not kwargs.get("is_service"):
- assert command, f"Command: {command}"
- job = Job(
- name=name if name is not None else " ",
- command=command,
- status=Status.PENDING,
- pipeline=self,
- children=[],
- parents=depends_on,
- **kwargs,
- )
- for parent_name in depends_on:
- assert (
- parent_name in self.jobs
- ), f"Parent job has to be defined before a child. Cannot find {parent_name}"
- parent = self.jobs[parent_name]
- assert parent.stage == job.stage, "Cannot have dependencies across stages"
- self.jobs[name] = job
- if kwargs.get("is_service"):
- self.services.append(job)
- return job
-
- @classmethod
- def env_matrix(cls, **kwargs):
- """
- Return a cartesian product of all the provided kwargs.
- """
- keys = list(sorted(kwargs.keys()))
- for values in product(*[kwargs[key] for key in keys]):
- yield dict(list(zip(keys, values)))
-
- def __ensure_duplex__(self):
- for name, job in self.jobs.items():
- for parent_name in job.parents:
- parent = self.jobs[parent_name]
- parent.children = list(sorted(set(parent.children).union(set([name]))))
-
- def run(self):
- """
- Run the pipeline. This is always called automatically when the context
- of the pipeline declaration finishes and so unless you are doing
- something fancy you don't need to call this manually.
- """
- self.__ensure_duplex__()
- # Run stages one by one
- job = None
- for stage in self.stages:
- # --- Trigger starting jobs
- jobs = {name: job for name, job in self.jobs.items() if job.stage == stage}
- for name in {job.name for job in jobs.values() if not job.parents}:
- jobs[name].trigger()
- # --- monitor and ensure all jobs run
- while not all(job.is_complete() for job in jobs.values()):
- for job in jobs.values():
- job.check_job(with_update_report=False)
- if not job.is_complete():
- # If all dependencies are met: trigger
- if len(job.parents) == 0 or all(
- jobs[parent_name].is_complete()
- and jobs[parent_name].status == Status.PASSED
- for parent_name in job.parents
- ):
- job.trigger()
- elif any(
- jobs[parent_name].is_complete()
- and jobs[parent_name].status != Status.PASSED
- for parent_name in job.parents
- ):
- job.status = Status.SKIPPED
- job.check_job()
- time.sleep(self.poll_interval)
- # --- has this stage passed?
- if not all(
- job.is_complete() and job.status == Status.PASSED
- for job in jobs.values()
- ):
- self.logging().error("Stage failed")
- job.update_report()
- break
- self.logging().error("Pipeline passed")
- if job is not None:
- report = job.update_report()
- self.logging().info("Report:", report=report)
-
- @contextmanager
- def stage(self, name, **kwargs):
- """
- A stage in a pipeline.
-
- Any kwargs passed to this stage are supplied to jobs created within
- this stage.
- """
- name = clean.name(name)
- assert name, "Name should have some value after it is cleaned"
- assert name not in self.jobs, "Stage name cannot match a job's name"
- assert name not in self.stages, "Stage names cannot be re-used"
- self.stages.append(name)
- kwargs["stage"] = name
- self.stage_kwargs = kwargs
- yield # -------------------------
- self.stage_kwargs = None
diff --git a/jaypore_ci/logging.py b/jaypore_ci/logging.py
@@ -1,63 +0,0 @@
-"""
-The basic logging module.
-"""
-import logging
-from typing import Any
-
-import structlog
-
-# This is used to accumulate logs and is later sent over to the CI status as a
-# separate log list
-jaypore_logs = []
-
-
-class JayporeLogger:
- """
- This is mainly used to collect logs into a single global variable so that
- the logs of the CI runner itself can also be posted as part of the CI
- report.
- """
-
- def __getstate__(self) -> str:
- return "stdout"
-
- def __setstate__(self, state: Any) -> None:
- pass
-
- def __deepcopy__(self, memodict: dict[Any, Any] = None) -> "JayporeLogger":
- return self.__class__()
-
- def msg(self, message: str) -> None:
- global jaypore_logs # pylint: disable=global-statement
- jaypore_logs.append(message)
- if len(jaypore_logs) > 1500:
- jaypore_logs = jaypore_logs[-1000:]
- print(message)
-
- log = debug = info = warn = warning = msg
- fatal = failure = err = error = critical = exception = msg
-
-
-class JayporeLoggerFactory:
- def __init__(self):
- pass
-
- def __call__(self, *args) -> JayporeLogger:
- return JayporeLogger()
-
-
-structlog.configure(
- processors=[
- structlog.contextvars.merge_contextvars,
- structlog.processors.add_log_level,
- # structlog.processors.StackInfoRenderer(),
- # structlog.dev.set_exc_info,
- structlog.processors.TimeStamper(fmt="iso"),
- structlog.dev.ConsoleRenderer(colors=False),
- ],
- wrapper_class=structlog.make_filtering_bound_logger(logging.NOTSET),
- context_class=dict,
- logger_factory=JayporeLoggerFactory(),
- cache_logger_on_first_use=False,
-)
-logger = structlog.get_logger()
diff --git a/jaypore_ci/remotes/__init__.py b/jaypore_ci/remotes/__init__.py
@@ -1,5 +0,0 @@
-from .mock import Mock
-from .git import GitRemote
-from .gitea import Gitea
-from .github import Github
-from .email import Email
diff --git a/jaypore_ci/remotes/email.py b/jaypore_ci/remotes/email.py
@@ -1,173 +0,0 @@
-"""
-An email remote.
-
-This is used to report pipeline status via email.
-Multiple updates appear as a single thread.
-"""
-import os
-import time
-import smtplib
-from html import escape as html_escape
-
-from email.headerregistry import Address
-from email.message import EmailMessage
-from pathlib import Path
-from urllib.parse import urlparse
-
-
-from jaypore_ci.interfaces import Remote, Repo
-from jaypore_ci.logging import logger
-
-
-class Email(Remote): # pylint: disable=too-many-instance-attributes
- """
- You can send pipeline status via email using this remote. In order to use it you
- can specify the following environment variables in your secrets:
-
- .. code-block:: console
-
- JAYPORE_EMAIL_ADDR=email-account@gmail.com
- JAYPORE_EMAIL_PASSWORD=some-app-password
- JAYPORE_EMAIL_TO=myself@gmail.com,mailing-list@gmail.com
- JAYPORE_EMAIL_FROM=noreply@gmail.com
-
- If you're using something other than gmail, you can specify
- `JAYPORE_EMAIL_HOST` and `JAYPORE_EMAIL_PORT` as well.
-
- Once that is done you can supply this remote to your pipeline instead of
- the usual gitea one.
-
- .. code-block:: python
-
- from jaypore_ci import jci, remotes, repos
-
- git = repos.Git.from_env()
- email = remotes.Email.from_env(repo=git)
- with jci.Pipeline(repo=git, remote=email) as p:
- pass
- # Do something
-
- :param host: What smtp host to use.
- :param port: Smtp port to use.
- :param addr: Smtp address to use for login.
- :param password: Smtp password to use for login.
- :param email_to: Which address the email should go to.
- :param email_from: Which address should be the sender of this email.
- :param subject: The subject line of the email.
- :param only_on_failure: If set to True, a single email will be sent when
- the pipeline fails. In all other cases no email is
- sent.
- :param publish_interval: Determines the delay in sending another email when
- we are sending multiple email updates in a single
- email thread. If `only_on_failure` is set, this
- option is ignored.
- """
-
- @classmethod
- def from_env(cls, *, repo: Repo) -> "Email":
- """
- Creates a remote instance from the environment.
- """
- remote = urlparse(repo.remote)
- owner = Path(remote.path).parts[1]
- name = Path(remote.path).parts[2].replace(".git", "")
- return cls(
- host=os.environ.get("JAYPORE_EMAIL_HOST", "smtp.gmail.com"),
- port=int(os.environ.get("JAYPORE_EMAIL_PORT", 465)),
- addr=os.environ["JAYPORE_EMAIL_ADDR"],
- password=os.environ["JAYPORE_EMAIL_PASSWORD"],
- email_to=os.environ["JAYPORE_EMAIL_TO"],
- email_from=os.environ.get(
- "JAYPORE_EMAIL_FROM", os.environ["JAYPORE_EMAIL_ADDR"]
- ),
- subject=f"JCI [{owner}/{name}] [{repo.branch} {repo.sha[:8]}]",
- branch=repo.branch,
- sha=repo.sha,
- )
-
- def __init__(
- self,
- *,
- host: str,
- port: int,
- addr: str,
- password: str,
- email_to: str,
- email_from: str,
- subject: str,
- only_on_failure: bool = False,
- publish_interval: int = 30,
- **kwargs,
- ): # pylint: disable=too-many-arguments
- super().__init__(**kwargs)
- # --- customer
- self.host = host
- self.port = port
- self.addr = addr
- self.password = password
- self.email_to = email_to
- self.email_from = email_from
- self.subject = subject
- self.timeout = 10
- self.publish_interval = publish_interval
- self.only_on_failure = only_on_failure
- # ---
- self.__smtp__ = None
- self.__last_published_at__ = None
- self.__last_report__ = None
-
- @property
- def smtp(self):
- if self.__smtp__ is None:
- smtp = smtplib.SMTP_SSL(self.host, self.port)
- smtp.ehlo()
- smtp.login(self.addr, self.password)
- self.__smtp__ = smtp
- return self.__smtp__
-
- def logging(self):
- """
- Return's a logging instance with information about gitea bound to it.
- """
- return logger.bind(addr=self.addr, host=self.host, port=self.port)
-
- def publish(self, report: str, status: str) -> None:
- """
- Will publish the report via email.
-
- :param report: Report to write to remote.
- :param status: One of ["pending", "success", "error", "failure",
- "warning"] This is the dot next to each commit in gitea.
- """
- assert status in ("pending", "success", "error", "failure", "warning")
- if (
- self.__last_published_at__ is not None
- and (time.time() - self.__last_published_at__) < self.publish_interval
- and status not in ("success", "failure")
- ) or (self.only_on_failure and status != "failure"):
- return
- if self.__last_report__ == report:
- return
- self.__last_report__ = report
- self.__last_published_at__ = time.time()
- # Let's send the email
- msg = EmailMessage()
- msg["Subject"] = self.subject
- msg["From"] = Address("JayporeCI", "JayporeCI", self.email_from)
- msg["To"] = self.email_to
- msg.set_content(report)
- msg.add_alternative(
- f"<html><body><pre>{html_escape(report)}</pre></body></html>",
- subtype="html",
- )
- try:
- self.smtp.send_message(msg)
- except Exception as e: # pylint: disable=broad-except
- self.logging().exception(e)
- self.__last_published_at__ = time.time()
- self.logging().info(
- "Report published",
- subject=self.subject,
- email_from=self.email_from,
- email_to=self.email_to,
- )
diff --git a/jaypore_ci/remotes/git.py b/jaypore_ci/remotes/git.py
@@ -1,96 +0,0 @@
-"""
-This is used to save the pipeline status to git itself.
-"""
-import time
-import subprocess
-
-from jaypore_ci.interfaces import Remote
-from jaypore_ci.repos import Git
-from jaypore_ci.logging import logger
-
-
-class GitRemote(Remote): # pylint: disable=too-many-instance-attributes
- """
- You can save pipeline status to git using this remote.
-
- To push/fetch your local refs to a git remote you can run
-
- .. code-block:: console
-
- git fetch origin refs/jayporeci/*:refs/jayporeci/*
- git push origin refs/jayporeci/*:refs/jayporeci/*
- """
-
- @classmethod
- def from_env(cls, *, repo: Git) -> "GitRemote":
- """
- Creates a remote instance from the environment.
- """
- assert isinstance(repo, Git), "Git remote can only work in a git repo"
- return cls(
- repo=repo,
- branch=repo.branch,
- sha=repo.sha,
- )
-
- def __init__(self, *, repo, **kwargs):
- super().__init__(**kwargs)
- self.repo = repo
-
- def logging(self):
- """
- Return's a logging instance with information about git bound to it.
- """
- return logger.bind(repo=self.repo)
-
- def publish(self, report: str, status: str) -> None:
- """
- Will publish the report via email.
-
- :param report: Report to write to remote.
- :param status: One of ["pending", "success", "error", "failure",
- "warning"] This is the dot next to each commit in gitea.
- """
- assert status in ("pending", "success", "error", "failure", "warning")
- now = time.time()
- lines = ""
- git_blob_sha = subprocess.check_output(
- "git hash-object -w --stdin",
- input=report,
- text=True,
- stderr=subprocess.STDOUT,
- shell=True,
- ).strip()
- lines += f"\n100644 blob {git_blob_sha}\t{now}.txt"
- lines = lines.strip()
- git_tree_sha = subprocess.run(
- "git mktree",
- input=lines,
- text=True,
- shell=True,
- check=False,
- stderr=subprocess.STDOUT,
- stdout=subprocess.PIPE,
- ).stdout.strip()
- git_commit_sha = subprocess.run(
- f"git commit-tree {git_tree_sha}",
- text=True,
- input=f"JayporeCI status: {now}",
- shell=True,
- check=False,
- stderr=subprocess.STDOUT,
- stdout=subprocess.PIPE,
- )
- assert git_commit_sha.returncode == 0
- git_commit_sha = (
- subprocess.check_output(
- f"git update-ref refs/jayporeci/{self.repo.sha} {git_commit_sha.stdout.strip()}",
- shell=True,
- stderr=subprocess.STDOUT,
- )
- .decode()
- .strip()
- )
- self.logging().info(
- "Published status to local git: refs/jayporeci/{self.repo.sha} {git_commit_sha}"
- )
diff --git a/jaypore_ci/remotes/gitea.py b/jaypore_ci/remotes/gitea.py
@@ -1,159 +0,0 @@
-"""
-A gitea remote git host.
-
-This is used to report pipeline status to the remote.
-"""
-import os
-
-import requests
-
-from jaypore_ci.interfaces import Remote, RemoteApiFailed, Repo, RemoteInfo
-from jaypore_ci.logging import logger
-
-
-class Gitea(Remote): # pylint: disable=too-many-instance-attributes
- """
- The remote implementation for gitea.
- """
-
- @classmethod
- def from_env(cls, *, repo: Repo) -> "Gitea":
- """
- Creates a remote instance from the environment.
- It will:
-
- - Find the remote location using `git remote`.
- - Find the current branch
- - Create a new pull request for that branch
- - Allow posting updates using the gitea token provided
- """
- os.environ["JAYPORE_COMMIT_BRANCH"] = repo.branch
- os.environ["JAYPORE_COMMIT_SHA"] = repo.sha
- rem = RemoteInfo.parse(repo.remote)
- return cls(
- root=f"https://{rem.netloc}",
- owner=rem.owner,
- repo=rem.repo,
- branch=repo.branch,
- token=os.environ["JAYPORE_GITEA_TOKEN"],
- sha=repo.sha,
- )
-
- def __init__(
- self, *, root, owner, repo, token, **kwargs
- ): # pylint: disable=too-many-arguments
- super().__init__(**kwargs)
- # --- customer
- self.root = root
- self.api = f"{root}/api/v1"
- self.owner = owner
- self.repo = repo
- self.token = token
- self.timeout = 10
- self.base_branch = "develop"
- # ---
- self.__pr_id__ = None
-
- def logging(self):
- """
- Return's a logging instance with information about gitea bound to it.
- """
- return logger.bind(
- root=self.root, owner=self.owner, repo=self.repo, branch=self.branch
- )
-
- def get_pr_id(self):
- """
- Returns the pull request ID for the current branch.
- """
- if self.__pr_id__ is None:
- r = requests.post(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls",
- params={"access_token": self.token},
- timeout=self.timeout,
- json={
- "base": self.base_branch,
- "body": "Branch auto created by JayporeCI",
- "head": self.branch,
- "title": self.branch,
- },
- )
- self.logging().debug("Get PR Id", status_code=r.status_code)
- if r.status_code == 409:
- self.__pr_id__ = r.text.split("issue_id:")[1].split(",")[0].strip()
- return self.get_pr_id()
- if r.status_code == 201:
- return self.get_pr_id()
- if (
- r.status_code == 404
- and r.json()["message"] == "IsBranchExist"
- and self.base_branch != "main"
- ):
- self.base_branch = "main"
- return self.get_pr_id()
- self.logging().debug(
- "Failed gitea api",
- api=self.api,
- owner=self.owner,
- repo=self.repo,
- token=self.token,
- branch=self.branch,
- status=r.status_code,
- response=r.text,
- )
- raise RemoteApiFailed(r)
- return self.__pr_id__
-
- def publish(self, report: str, status: str):
- """
- Will publish the report to the remote.
-
- :param report: Report to write to remote.
- :param status: One of ["pending", "success", "error", "failure",
- "warning"] This is the dot next to each commit in gitea.
- """
- assert status in ("pending", "success", "error", "failure", "warning")
- issue_id = self.get_pr_id()
- # Get existing PR body
- r = requests.get(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls/{issue_id}",
- timeout=self.timeout,
- params={"access_token": self.token},
- )
- self.logging().debug("Get existing body", status_code=r.status_code)
- assert r.status_code == 200
- body = r.json()["body"]
- body = (line for line in body.split("\n"))
- prefix = []
- for line in body:
- if "```jayporeci" in line:
- prefix = prefix[:-1]
- break
- prefix.append(line)
- while prefix and prefix[-1].strip() == "":
- prefix = prefix[:-1]
- prefix.append("")
- # Post new body with report
- report = "\n".join(prefix) + "\n" + report
- r = requests.patch(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls/{issue_id}",
- data={"body": report},
- timeout=self.timeout,
- params={"access_token": self.token},
- )
- self.logging().debug("Published new report", status_code=r.status_code)
- # Set commit status
- r = requests.post(
- f"{self.api}/repos/{self.owner}/{self.repo}/statuses/{self.sha}",
- json={
- "context": "JayporeCi",
- "description": f"Pipeline status is: {status}",
- "state": status,
- "target_url": f"{self.root}/{self.owner}/{self.repo}/pulls/{issue_id}",
- },
- timeout=self.timeout,
- params={"access_token": self.token},
- )
- self.logging().debug(
- "Published new status", status=status, status_code=r.status_code
- )
diff --git a/jaypore_ci/remotes/github.py b/jaypore_ci/remotes/github.py
@@ -1,161 +0,0 @@
-"""
-A github remote git host.
-
-This is used to report pipeline status to the remote.
-"""
-import os
-
-import requests
-
-from jaypore_ci.interfaces import Remote, RemoteApiFailed, Repo, RemoteInfo
-from jaypore_ci.logging import logger
-
-
-class Github(Remote): # pylint: disable=too-many-instance-attributes
- """
- The remote implementation for github.
- """
-
- def __headers__(self):
- return {
- "Authorization": f"Bearer {self.token}",
- "Accept": "application/vnd.github+json",
- "X-Github-Api-Version": "2022-11-28",
- }
-
- @classmethod
- def from_env(cls, *, repo: Repo) -> "Github":
- """
- Creates a remote instance from the environment.
- It will:
-
- - Find the remote location using `git remote`.
- - Find the current branch
- - Create a new pull request for that branch
- - Allow posting updates using the gitea token provided
- """
- rem = RemoteInfo.parse(repo.remote)
- os.environ["JAYPORE_COMMIT_BRANCH"] = repo.branch
- os.environ["JAYPORE_COMMIT_SHA"] = repo.sha
- return cls(
- root="https://api.github.com",
- owner=rem.owner,
- repo=rem.repo,
- branch=repo.branch,
- token=os.environ["JAYPORE_GITHUB_TOKEN"],
- sha=repo.sha,
- )
-
- def __init__(
- self, *, root, owner, repo, token, **kwargs
- ): # pylint: disable=too-many-arguments
- super().__init__(**kwargs)
- # --- customer
- self.root = root
- self.api = root
- self.owner = owner
- self.repo = repo
- self.token = token
- self.timeout = 10
- self.base_branch = "main"
-
- def logging(self):
- """
- Return's a logging instance with information about gitea bound to it.
- """
- return logger.bind(
- root=self.root, owner=self.owner, repo=self.repo, branch=self.branch
- )
-
- def get_pr_id(self):
- """
- Returns the pull request ID for the current branch.
- """
- r = requests.post(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls",
- headers=self.__headers__(),
- timeout=self.timeout,
- json={
- "base": self.base_branch,
- "body": "Branch auto created by JayporeCI",
- "head": self.branch,
- "title": self.branch,
- },
- )
- self.logging().debug("Create PR", status_code=r.status_code)
- if r.status_code == 201:
- return r.json()["number"]
- r = requests.get(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls",
- headers=self.__headers__(),
- timeout=self.timeout,
- json={"base": self.base_branch, "head": self.branch, "draft": True},
- )
- self.logging().debug("Get PR", status_code=r.status_code)
- if r.status_code == 200:
- if len(r.json()) == 1:
- return r.json()[0]["number"]
- self.logging().debug(
- "Failed github api",
- api=self.api,
- owner=self.owner,
- repo=self.repo,
- token=self.token,
- branch=self.branch,
- status=r.status_code,
- response=r.text,
- )
- raise RemoteApiFailed(r)
-
- def publish(self, report: str, status: str):
- """
- Will publish the report to the remote.
-
- :param report: Report to write to remote.
- :param status: One of ["pending", "success", "error", "failure"]
- This is the dot/tick next to each commit in gitea.
- """
- assert status in ("pending", "success", "error", "failure")
- issue_id = self.get_pr_id()
- # Get existing PR body
- r = requests.get(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls/{issue_id}",
- timeout=self.timeout,
- headers=self.__headers__(),
- )
- self.logging().debug("Get existing body", status_code=r.status_code)
- assert r.status_code == 200
- body = r.json()["body"]
- body = (line for line in body.split("\n"))
- prefix = []
- for line in body:
- if "```jayporeci" in line:
- prefix = prefix[:-1]
- break
- prefix.append(line)
- while prefix and prefix[-1].strip() == "":
- prefix = prefix[:-1]
- prefix.append("")
- # Post new body with report
- report = "\n".join(prefix) + "\n" + report
- r = requests.patch(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls/{issue_id}",
- json={"body": report},
- timeout=self.timeout,
- headers=self.__headers__(),
- )
- self.logging().debug("Published new report", status_code=r.status_code)
- # Set commit status
- r = requests.post(
- f"{self.api}/repos/{self.owner}/{self.repo}/statuses/{self.sha}",
- json={
- "context": "JayporeCi",
- "description": f"Pipeline status is: {status}",
- "state": status,
- },
- timeout=self.timeout,
- headers=self.__headers__(),
- )
- self.logging().debug(
- "Published new status", status=status, status_code=r.status_code
- )
diff --git a/jaypore_ci/remotes/mock.py b/jaypore_ci/remotes/mock.py
@@ -1,38 +0,0 @@
-"""
-A mock remote.
-
-This is used to test pipelines.
-"""
-from jaypore_ci.interfaces import Remote, Repo
-from jaypore_ci.logging import logger
-
-
-class Mock(Remote): # pylint: disable=too-many-instance-attributes
- """
- A mock remote implementation.
- """
-
- @classmethod
- def from_env(cls, *, repo: Repo):
- return cls(branch=repo.branch, sha=repo.sha)
-
- def logging(self):
- """
- Return's a logging instance with information about gitea bound to it.
- """
- return logger.bind(branch=self.branch)
-
- def get_pr_id(self):
- """
- Returns the pull request ID for the current branch.
- """
- return self.branch
-
- def publish(self, report: str, status: str):
- """
- Will publish the report to the remote.
- """
- pr_id = self.get_pr_id()
- self.logging().debug(
- "Published report", report=report, status=status, pr_id=pr_id
- )
diff --git a/jaypore_ci/reporters/__init__.py b/jaypore_ci/reporters/__init__.py
@@ -1,3 +0,0 @@
-from .common import clean_logs
-from .markdown import Markdown
-from .text import Text
diff --git a/jaypore_ci/reporters/common.py b/jaypore_ci/reporters/common.py
@@ -1,12 +0,0 @@
-import re
-
-ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
-
-
-def clean_logs(logs):
- """
- Clean logs so that they don't have HTML/ANSI color codes in them.
- """
- for old, new in [("<", r"\<"), (">", r"\>"), ("`", '"'), ("\r", "\n")]:
- logs = logs.replace(old, new)
- return [line.strip() for line in ansi_escape.sub("", logs).split("\n")]
diff --git a/jaypore_ci/reporters/markdown.py b/jaypore_ci/reporters/markdown.py
@@ -1,100 +0,0 @@
-from jaypore_ci.interfaces import Reporter, Status
-
-
-def __node_mod__(nodes):
- mod = 1
- if len(nodes) > 5:
- mod = 2
- if len(nodes) > 10:
- mod = 3
- return mod
-
-
-class Markdown(Reporter):
- def __init__(self, *, graph_direction: str = "TD", **kwargs):
- super().__init__(**kwargs)
- self.graph_direction = graph_direction
-
- def render(self, pipeline):
- """
- Returns a markdown report for a given pipeline.
-
- It will include a mermaid graph and a collapsible list of logs for each
- job.
- """
- return f"""
-<details>
- <summary>JayporeCi: {pipeline.get_status_dot()} {pipeline.remote.sha[:10]}</summary>
-
-{self.__render_graph__(pipeline)}
-
-</details>"""
-
- def __render_graph__(self, pipeline) -> str: # pylint: disable=too-many-locals
- """
- Render a mermaid graph given the jobs in the pipeline.
- """
- st_map = {
- Status.PENDING: "pending",
- Status.RUNNING: "running",
- Status.FAILED: "failed",
- Status.PASSED: "passed",
- Status.TIMEOUT: "timeout",
- Status.SKIPPED: "skipped",
- }
- mermaid = f"""
-```mermaid
-flowchart {self.graph_direction}
-"""
- for stage in pipeline.stages:
- nodes, edges = set(), set()
- for job in pipeline.jobs.values():
- if job.stage != stage:
- continue
- nodes.add(job.name)
- edges |= {(p, job.name) for p in job.parents}
- mermaid += f"""
- subgraph {stage}
- direction {self.graph_direction}
- """
- ref = {n: f"{stage}_{i}" for i, n in enumerate(nodes)}
- # If there are too many nodes, scatter them with different length arrows
- mod = __node_mod__([n for n in nodes if not pipeline.jobs[n].parents])
- for i, n in enumerate(nodes):
- n = pipeline.jobs[n]
- if n.parents:
- continue
- arrow = "." * ((i % mod) + 1)
- arrow = f"-{arrow}->"
- mermaid += f"""
- s_{stage}(( )) {arrow} {ref[n.name]}({n.name}):::{st_map[n.status]}"""
- mod = __node_mod__([n for n in nodes if pipeline.jobs[n].parents])
- for i, (a, b) in enumerate(edges):
- a, b = pipeline.jobs[a], pipeline.jobs[b]
- arrow = "." * ((i % mod) + 1)
- arrow = f"-{arrow}->"
- mermaid += "\n"
- mermaid += (
- " "
- "{ref[a.name]}({a.name}):::{st_map[a.status]}"
- "{arrow}"
- "{ref[b.name]}({b.name}):::{st_map[b.status]}"
- )
- mermaid += """
- end
- """
- for s1, s2 in zip(pipeline.stages, pipeline.stages[1:]):
- mermaid += f"""
- {s1} ---> {s2}
- """
- mermaid += """
-
- classDef pending fill:#aaa, color:black, stroke:black,stroke-width:2px,stroke-dasharray: 5 5;
- classDef skipped fill:#aaa, color:black, stroke:black,stroke-width:2px;
- classDef assigned fill:#ddd, color:black, stroke:black,stroke-width:2px;
- classDef running fill:#bae1ff,color:black,stroke:black,stroke-width:2px,stroke-dasharray: 5 5;
- classDef passed fill:#88d8b0, color:black, stroke:black;
- classDef failed fill:#ff6f69, color:black, stroke:black;
- classDef timeout fill:#ffda9e, color:black, stroke:black;
-``` """
- return mermaid
diff --git a/jaypore_ci/reporters/text.py b/jaypore_ci/reporters/text.py
@@ -1,80 +0,0 @@
-import pendulum
-from jaypore_ci.interfaces import Reporter, Status
-
-
-def __get_time_format__(job):
- time = " --:--"
- if job.run_state is not None:
- if (
- job.run_state.finished_at is not None
- and job.run_state.started_at is not None
- ):
- s = job.run_state.finished_at - job.run_state.started_at
- elif job.run_state.started_at is not None:
- s = pendulum.now() - job.run_state.started_at
- else:
- s = None
- s = s.in_seconds() if s is not None else 0
- m = s // 60
- time = f"{m:>3}:{s % 60:>2}"
- return time
-
-
-def get_job_report(jobname):
- with open(f"/jaypore_ci/run/{jobname}.txt", "r", encoding="utf-8") as fl:
- return fl.read()
-
-
-__ST_MAP__ = {
- Status.RUNNING: "🔵",
- Status.FAILED: "🔴",
- Status.PASSED: "🟢",
-}
-
-
-class Text(Reporter):
- def render(self, pipeline):
- """
- Returns a human readable report for a given pipeline.
- """
- max_name = max(len(job.name) for job in pipeline.jobs.values())
- max_name = max(max_name, len("jayporeci"))
- max_report = 10
- name = ("JayporeCI" + " " * max_name)[:max_name]
- graph = [
- "",
- "```jayporeci",
- f"╔ {pipeline.get_status_dot()} : {name} [sha {pipeline.remote.sha[:10]}]",
- ]
- closer = "┗" + ("━" * (len(" O : ") + max_name + 1 + 1 + 8 + 1)) + "┛"
- for stage in pipeline.stages:
- nodes, edges = set(), set()
- for job in pipeline.jobs.values():
- if job.stage != stage:
- continue
- nodes.add(job.name)
- edges |= {(p, job.name) for p in job.parents}
- if not nodes:
- continue
- graph += [f"┏━ {stage}", "┃"]
- for n in sorted(
- nodes, key=lambda x: (len(pipeline.jobs[x].parents), x)
- ): # Fewer parents first
- n = pipeline.jobs[n]
- name = (n.name + " " * max_name)[:max_name]
- status = __ST_MAP__.get(n.status, "🟡")
- run_id = f"{n.run_id}"[:8] if n.run_id is not None else ""
- graph += [f"┃ {status} : {name} [{run_id:<8}] {__get_time_format__(n)}"]
- try:
- report = get_job_report(n.name)
- report = " ".join(report.strip().split())
- report = (report + " " * max_report)[:max_report]
- except FileNotFoundError:
- report = " " * max_report
- graph[-1] += f" {report}"
- if n.parents:
- graph[-1] += f" ❮-- {n.parents}"
- graph += [closer]
- graph += ["```"]
- graph = "\n".join(graph)
- return f"\n{graph}"
diff --git a/jaypore_ci/repos/__init__.py b/jaypore_ci/repos/__init__.py
@@ -1 +0,0 @@
-from .git import Git
diff --git a/jaypore_ci/repos/git.py b/jaypore_ci/repos/git.py
@@ -1,49 +0,0 @@
-import subprocess
-from typing import List
-
-from jaypore_ci.interfaces import Repo
-
-
-class Git(Repo):
- def files_changed(self, target: str) -> List[str]:
- "Returns list of files changed between current sha and target"
- return (
- subprocess.check_output(
- f"git diff --name-only {target} {self.sha}", shell=True
- )
- .decode()
- .strip()
- .split("\n")
- )
-
- @classmethod
- def from_env(cls) -> "Git":
- """
- Gets repo status from the environment and git repo on disk.
- """
- remote = (
- subprocess.check_output(
- "git remote -v | grep push | head -n1 | grep https | awk '{print $2}'",
- shell=True,
- )
- .decode()
- .strip()
- )
- assert "https://" in remote, f"Only https remotes supported: {remote}"
- assert ".git" in remote
- # NOTE: Later on perhaps we should support non-https remotes as well
- # since JCI does not actually do anything with the remote.
- branch = (
- subprocess.check_output(
- r"git branch | grep \* | awk '{print $2}'", shell=True
- )
- .decode()
- .strip()
- )
- sha = subprocess.check_output("git rev-parse HEAD", shell=True).decode().strip()
- message = (
- subprocess.check_output("git log -1 --pretty=%B", shell=True)
- .decode()
- .strip()
- )
- return cls(sha=sha, branch=branch, remote=remote, commit_message=message)
diff --git a/poetry.lock b/poetry.lock
@@ -1,1373 +0,0 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
-
-[[package]]
-name = "alabaster"
-version = "0.7.13"
-description = "A configurable sidebar-enabled Sphinx theme"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
- {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
-]
-
-[[package]]
-name = "astroid"
-version = "2.15.0"
-description = "An abstract syntax tree for Python with inference support."
-category = "dev"
-optional = false
-python-versions = ">=3.7.2"
-files = [
- {file = "astroid-2.15.0-py3-none-any.whl", hash = "sha256:e3e4d0ffc2d15d954065579689c36aac57a339a4679a679579af6401db4d3fdb"},
- {file = "astroid-2.15.0.tar.gz", hash = "sha256:525f126d5dc1b8b0b6ee398b33159105615d92dc4a17f2cd064125d57f6186fa"},
-]
-
-[package.dependencies]
-lazy-object-proxy = ">=1.4.0"
-typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
-wrapt = [
- {version = ">=1.11,<2", markers = "python_version < \"3.11\""},
- {version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
-]
-
-[[package]]
-name = "attrs"
-version = "22.2.0"
-description = "Classes Without Boilerplate"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
- {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
-]
-
-[package.extras]
-cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
-dev = ["attrs[docs,tests]"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
-tests = ["attrs[tests-no-zope]", "zope.interface"]
-tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
-
-[[package]]
-name = "babel"
-version = "2.12.1"
-description = "Internationalization utilities"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
- {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
-]
-
-[package.dependencies]
-pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
-
-[[package]]
-name = "black"
-version = "22.12.0"
-description = "The uncompromising code formatter."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
- {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
- {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
- {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
- {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
- {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
- {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
- {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
- {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
- {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
- {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
- {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "certifi"
-version = "2022.12.7"
-description = "Python package for providing Mozilla's CA Bundle."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
- {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
-]
-
-[[package]]
-name = "charset-normalizer"
-version = "3.1.0"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-category = "main"
-optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
- {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
-]
-
-[[package]]
-name = "click"
-version = "8.1.3"
-description = "Composable command line interface toolkit"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
- {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-files = [
- {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
- {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
-[[package]]
-name = "coverage"
-version = "7.2.2"
-description = "Code coverage measurement for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"},
- {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"},
- {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"},
- {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"},
- {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"},
- {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"},
- {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"},
- {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"},
- {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"},
- {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"},
- {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"},
- {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"},
- {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"},
- {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"},
- {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"},
- {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"},
- {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"},
- {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"},
- {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"},
- {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"},
- {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"},
- {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"},
- {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"},
- {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"},
- {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"},
- {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"},
- {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"},
- {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"},
- {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"},
- {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"},
- {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"},
- {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"},
- {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"},
- {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"},
- {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"},
- {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"},
- {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"},
- {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"},
- {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"},
- {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"},
- {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"},
- {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"},
- {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"},
- {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"},
- {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"},
- {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"},
- {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"},
- {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"},
- {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"},
- {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"},
- {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"},
-]
-
-[package.extras]
-toml = ["tomli"]
-
-[[package]]
-name = "dill"
-version = "0.3.6"
-description = "serialize all of python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
- {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
-]
-
-[package.extras]
-graph = ["objgraph (>=1.7.2)"]
-
-[[package]]
-name = "docker"
-version = "6.0.1"
-description = "A Python library for the Docker Engine API."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "docker-6.0.1-py3-none-any.whl", hash = "sha256:dbcb3bd2fa80dca0788ed908218bf43972772009b881ed1e20dfc29a65e49782"},
- {file = "docker-6.0.1.tar.gz", hash = "sha256:896c4282e5c7af5c45e8b683b0b0c33932974fe6e50fc6906a0a83616ab3da97"},
-]
-
-[package.dependencies]
-packaging = ">=14.0"
-pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""}
-requests = ">=2.26.0"
-urllib3 = ">=1.26.0"
-websocket-client = ">=0.32.0"
-
-[package.extras]
-ssh = ["paramiko (>=2.4.3)"]
-
-[[package]]
-name = "docutils"
-version = "0.18.1"
-description = "Docutils -- Python Documentation Utilities"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
- {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"},
- {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"},
-]
-
-[[package]]
-name = "exceptiongroup"
-version = "1.1.1"
-description = "Backport of PEP 654 (exception groups)"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
- {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
-]
-
-[package.extras]
-test = ["pytest (>=6)"]
-
-[[package]]
-name = "gprof2dot"
-version = "2022.7.29"
-description = "Generate a dot graph from the output of several profilers."
-category = "dev"
-optional = false
-python-versions = ">=2.7"
-files = [
- {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"},
- {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"},
-]
-
-[[package]]
-name = "hypothesis"
-version = "6.70.0"
-description = "A library for property-based testing"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "hypothesis-6.70.0-py3-none-any.whl", hash = "sha256:be395f71d6337a5e8ed2f695c568360a686056c3b00c98bd818874c674b24586"},
- {file = "hypothesis-6.70.0.tar.gz", hash = "sha256:f5cae09417d0ffc7711f602cdcfa3b7baf344597a672a84658186605b04f4a4f"},
-]
-
-[package.dependencies]
-attrs = ">=19.2.0"
-exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
-sortedcontainers = ">=2.1.0,<3.0.0"
-
-[package.extras]
-all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "importlib-metadata (>=3.6)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.9.0)", "pandas (>=1.0)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2022.7)"]
-cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"]
-codemods = ["libcst (>=0.3.16)"]
-dateutil = ["python-dateutil (>=1.4)"]
-django = ["django (>=3.2)"]
-dpcontracts = ["dpcontracts (>=0.4)"]
-ghostwriter = ["black (>=19.10b0)"]
-lark = ["lark (>=0.10.1)"]
-numpy = ["numpy (>=1.9.0)"]
-pandas = ["pandas (>=1.0)"]
-pytest = ["pytest (>=4.6)"]
-pytz = ["pytz (>=2014.1)"]
-redis = ["redis (>=3.0.0)"]
-zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2022.7)"]
-
-[[package]]
-name = "idna"
-version = "3.4"
-description = "Internationalized Domain Names in Applications (IDNA)"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
- {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
-]
-
-[[package]]
-name = "imagesize"
-version = "1.4.1"
-description = "Getting image size from png/jpeg/jpeg2000/gif file"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
- {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
-]
-
-[[package]]
-name = "importlib-metadata"
-version = "6.1.0"
-description = "Read metadata from Python packages"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"},
- {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"},
-]
-
-[package.dependencies]
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
-
-[[package]]
-name = "iniconfig"
-version = "2.0.0"
-description = "brain-dead simple config-ini parsing"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
- {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
-]
-
-[[package]]
-name = "isort"
-version = "5.12.0"
-description = "A Python utility / library to sort Python imports."
-category = "dev"
-optional = false
-python-versions = ">=3.8.0"
-files = [
- {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
- {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
-]
-
-[package.extras]
-colors = ["colorama (>=0.4.3)"]
-pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
-plugins = ["setuptools"]
-requirements-deprecated-finder = ["pip-api", "pipreqs"]
-
-[[package]]
-name = "jinja2"
-version = "3.1.2"
-description = "A very fast and expressive template engine."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
- {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
-]
-
-[package.dependencies]
-MarkupSafe = ">=2.0"
-
-[package.extras]
-i18n = ["Babel (>=2.7)"]
-
-[[package]]
-name = "lazy-object-proxy"
-version = "1.9.0"
-description = "A fast and thorough lazy object proxy."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"},
-]
-
-[[package]]
-name = "markdown-it-py"
-version = "2.2.0"
-description = "Python port of markdown-it. Markdown parsing, done right!"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
- {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
-]
-
-[package.dependencies]
-mdurl = ">=0.1,<1.0"
-
-[package.extras]
-benchmarking = ["psutil", "pytest", "pytest-benchmark"]
-code-style = ["pre-commit (>=3.0,<4.0)"]
-compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
-linkify = ["linkify-it-py (>=1,<3)"]
-plugins = ["mdit-py-plugins"]
-profiling = ["gprof2dot"]
-rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
-testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
-
-[[package]]
-name = "markupsafe"
-version = "2.1.2"
-description = "Safely add untrusted strings to HTML/XML markup."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
- {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
- {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
- {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
- {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
- {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
- {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
-]
-
-[[package]]
-name = "mccabe"
-version = "0.7.0"
-description = "McCabe checker, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
- {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
-
-[[package]]
-name = "mdurl"
-version = "0.1.2"
-description = "Markdown URL utilities"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
- {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
-]
-
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-description = "Type system extensions for programs checked with the mypy type checker."
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
- {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-
-[[package]]
-name = "packaging"
-version = "23.0"
-description = "Core utilities for Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
- {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
-]
-
-[[package]]
-name = "pathspec"
-version = "0.11.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
- {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
-]
-
-[[package]]
-name = "pendulum"
-version = "2.1.2"
-description = "Python datetimes made easy"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
- {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"},
- {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"},
- {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"},
- {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"},
- {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"},
- {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"},
- {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"},
- {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"},
- {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"},
- {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"},
- {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"},
- {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"},
- {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"},
- {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"},
- {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"},
- {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"},
- {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"},
- {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"},
- {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"},
- {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"},
- {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"},
-]
-
-[package.dependencies]
-python-dateutil = ">=2.6,<3.0"
-pytzdata = ">=2020.1"
-
-[[package]]
-name = "platformdirs"
-version = "3.1.1"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
- {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
-]
-
-[package.extras]
-docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
-
-[[package]]
-name = "pluggy"
-version = "1.0.0"
-description = "plugin and hook calling mechanisms for python"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
- {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
-]
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "pygments"
-version = "2.14.0"
-description = "Pygments is a syntax highlighting package written in Python."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
- {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
-]
-
-[package.extras]
-plugins = ["importlib-metadata"]
-
-[[package]]
-name = "pylint"
-version = "2.17.1"
-description = "python code static checker"
-category = "dev"
-optional = false
-python-versions = ">=3.7.2"
-files = [
- {file = "pylint-2.17.1-py3-none-any.whl", hash = "sha256:8660a54e3f696243d644fca98f79013a959c03f979992c1ab59c24d3f4ec2700"},
- {file = "pylint-2.17.1.tar.gz", hash = "sha256:d4d009b0116e16845533bc2163493d6681846ac725eab8ca8014afb520178ddd"},
-]
-
-[package.dependencies]
-astroid = ">=2.15.0,<=2.17.0-dev0"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = [
- {version = ">=0.2", markers = "python_version < \"3.11\""},
- {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
-]
-isort = ">=4.2.5,<6"
-mccabe = ">=0.6,<0.8"
-platformdirs = ">=2.2.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-tomlkit = ">=0.10.1"
-typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-spelling = ["pyenchant (>=3.2,<4.0)"]
-testutils = ["gitpython (>3)"]
-
-[[package]]
-name = "pytest"
-version = "7.2.2"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"},
- {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"},
-]
-
-[package.dependencies]
-attrs = ">=19.2.0"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
-
-[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
-
-[[package]]
-name = "pytest-profiling"
-version = "1.7.0"
-description = "Profiling plugin for py.test"
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
- {file = "pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29"},
- {file = "pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"},
-]
-
-[package.dependencies]
-gprof2dot = "*"
-pytest = "*"
-six = "*"
-
-[package.extras]
-tests = ["pytest-virtualenv"]
-
-[[package]]
-name = "python-dateutil"
-version = "2.8.2"
-description = "Extensions to the standard Python datetime module"
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-files = [
- {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
- {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-]
-
-[package.dependencies]
-six = ">=1.5"
-
-[[package]]
-name = "pytz"
-version = "2022.7.1"
-description = "World timezone definitions, modern and historical"
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
- {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
- {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
-]
-
-[[package]]
-name = "pytzdata"
-version = "2020.1"
-description = "The Olson timezone database for Python."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"},
- {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"},
-]
-
-[[package]]
-name = "pywin32"
-version = "305"
-description = "Python for Window Extensions"
-category = "main"
-optional = false
-python-versions = "*"
-files = [
- {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"},
- {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"},
- {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"},
- {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"},
- {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"},
- {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"},
- {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"},
- {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"},
- {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"},
- {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"},
- {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"},
- {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"},
- {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"},
- {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"},
-]
-
-[[package]]
-name = "requests"
-version = "2.28.2"
-description = "Python HTTP for Humans."
-category = "main"
-optional = false
-python-versions = ">=3.7, <4"
-files = [
- {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
- {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
-]
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<4"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<1.27"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "rich"
-version = "13.3.2"
-description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-category = "dev"
-optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"},
- {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"},
-]
-
-[package.dependencies]
-markdown-it-py = ">=2.2.0,<3.0.0"
-pygments = ">=2.13.0,<3.0.0"
-typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
-
-[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<9)"]
-
-[[package]]
-name = "six"
-version = "1.16.0"
-description = "Python 2 and 3 compatibility utilities"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
- {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
- {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-
-[[package]]
-name = "snowballstemmer"
-version = "2.2.0"
-description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
- {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
- {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
-]
-
-[[package]]
-name = "sortedcontainers"
-version = "2.4.0"
-description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
- {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
- {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
-]
-
-[[package]]
-name = "sphinx"
-version = "5.3.0"
-description = "Python documentation generator"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"},
- {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"},
-]
-
-[package.dependencies]
-alabaster = ">=0.7,<0.8"
-babel = ">=2.9"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-docutils = ">=0.14,<0.20"
-imagesize = ">=1.3"
-importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
-Jinja2 = ">=3.0"
-packaging = ">=21.0"
-Pygments = ">=2.12"
-requests = ">=2.5.0"
-snowballstemmer = ">=2.0"
-sphinxcontrib-applehelp = "*"
-sphinxcontrib-devhelp = "*"
-sphinxcontrib-htmlhelp = ">=2.0.0"
-sphinxcontrib-jsmath = "*"
-sphinxcontrib-qthelp = "*"
-sphinxcontrib-serializinghtml = ">=1.1.5"
-
-[package.extras]
-docs = ["sphinxcontrib-websupport"]
-lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"]
-test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"]
-
-[[package]]
-name = "sphinx-rtd-theme"
-version = "1.2.0"
-description = "Read the Docs theme for Sphinx"
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-files = [
- {file = "sphinx_rtd_theme-1.2.0-py2.py3-none-any.whl", hash = "sha256:f823f7e71890abe0ac6aaa6013361ea2696fc8d3e1fa798f463e82bdb77eeff2"},
- {file = "sphinx_rtd_theme-1.2.0.tar.gz", hash = "sha256:a0d8bd1a2ed52e0b338cbe19c4b2eef3c5e7a048769753dac6a9f059c7b641b8"},
-]
-
-[package.dependencies]
-docutils = "<0.19"
-sphinx = ">=1.6,<7"
-sphinxcontrib-jquery = {version = ">=2.0.0,<3.0.0 || >3.0.0", markers = "python_version > \"3\""}
-
-[package.extras]
-dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"]
-
-[[package]]
-name = "sphinxcontrib-applehelp"
-version = "1.0.4"
-description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"},
- {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"},
-]
-
-[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
-test = ["pytest"]
-
-[[package]]
-name = "sphinxcontrib-devhelp"
-version = "1.0.2"
-description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
- {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
-]
-
-[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
-test = ["pytest"]
-
-[[package]]
-name = "sphinxcontrib-htmlhelp"
-version = "2.0.1"
-description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"},
- {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"},
-]
-
-[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
-test = ["html5lib", "pytest"]
-
-[[package]]
-name = "sphinxcontrib-jquery"
-version = "4.1"
-description = "Extension to include jQuery on newer Sphinx releases"
-category = "dev"
-optional = false
-python-versions = ">=2.7"
-files = [
- {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"},
- {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"},
-]
-
-[package.dependencies]
-Sphinx = ">=1.8"
-
-[[package]]
-name = "sphinxcontrib-jsmath"
-version = "1.0.1"
-description = "A sphinx extension which renders display math in HTML via JavaScript"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
- {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
-]
-
-[package.extras]
-test = ["flake8", "mypy", "pytest"]
-
-[[package]]
-name = "sphinxcontrib-mermaid"
-version = "0.8.1"
-description = "Mermaid diagrams in yours Sphinx powered docs"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "sphinxcontrib-mermaid-0.8.1.tar.gz", hash = "sha256:fa3e5325d4ba395336e6137d113f55026b1a03ccd115dc54113d1d871a580466"},
- {file = "sphinxcontrib_mermaid-0.8.1-py3-none-any.whl", hash = "sha256:15491c24ec78cf1626b1e79e797a9ce87cb7959cf38f955eb72dd5512aeb6ce9"},
-]
-
-[[package]]
-name = "sphinxcontrib-qthelp"
-version = "1.0.3"
-description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
- {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
-]
-
-[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
-test = ["pytest"]
-
-[[package]]
-name = "sphinxcontrib-serializinghtml"
-version = "1.1.5"
-description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
- {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
-]
-
-[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
-test = ["pytest"]
-
-[[package]]
-name = "structlog"
-version = "22.3.0"
-description = "Structured Logging for Python"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"},
- {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"},
-]
-
-[package.extras]
-dev = ["structlog[docs,tests,typing]"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"]
-tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"]
-typing = ["mypy", "rich", "twisted"]
-
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
-]
-
-[[package]]
-name = "tomlkit"
-version = "0.11.6"
-description = "Style preserving TOML library"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
- {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
-]
-
-[[package]]
-name = "tqdm"
-version = "4.65.0"
-description = "Fast, Extensible Progress Meter"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"},
- {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[package.extras]
-dev = ["py-make (>=0.1.0)", "twine", "wheel"]
-notebook = ["ipywidgets (>=6)"]
-slack = ["slack-sdk"]
-telegram = ["requests"]
-
-[[package]]
-name = "typing-extensions"
-version = "4.5.0"
-description = "Backported and Experimental Type Hints for Python 3.7+"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
- {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
-]
-
-[[package]]
-name = "urllib3"
-version = "1.26.15"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
-files = [
- {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
- {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
-]
-
-[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
-socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
-
-[[package]]
-name = "websocket-client"
-version = "1.5.1"
-description = "WebSocket client for Python with low level API options"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"},
- {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"},
-]
-
-[package.extras]
-docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
-optional = ["python-socks", "wsaccel"]
-test = ["websockets"]
-
-[[package]]
-name = "wrapt"
-version = "1.15.0"
-description = "Module for decorators, wrappers and monkey patching."
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-files = [
- {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
- {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
- {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
- {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
- {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
- {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
- {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
- {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
- {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
- {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
- {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
- {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
- {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
- {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
- {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
- {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
- {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
- {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
- {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
-]
-
-[[package]]
-name = "zipp"
-version = "3.15.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
- {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.8"
-content-hash = "7433dfdb5a52ecc76bc584d9453c1c4449e7b7659bdc653e6704470ae53f537d"
diff --git a/pyproject.toml b/pyproject.toml
@@ -1,32 +0,0 @@
-[tool.poetry]
-name = "jaypore_ci"
-version = "0.2.30"
-description = ""
-authors = ["arjoonn sharma <arjoonn.94@gmail.com>"]
-homepage = "https://www.jayporeci.in/"
-documentation = "https://www.jayporeci.in/"
-
-[tool.poetry.dependencies]
-python = "^3.8"
-requests = "^2.28.1"
-click = "^8.1.3"
-pendulum = "^2.1.2"
-structlog = "^22.3.0"
-docker = "^6.0.1"
-tqdm = "^4.65.0"
-
-[tool.poetry.group.dev.dependencies]
-pylint = "^2.15.7"
-black = "^22.10.0"
-pytest = "^7.2.0"
-sphinx = "^5.3.0"
-rich = "^13.2.0"
-coverage = "^7.0.5"
-pytest-profiling = "^1.7.0"
-hypothesis = "^6.68.2"
-sphinx-rtd-theme = "^1.2.0"
-sphinxcontrib-mermaid = "^0.8.1"
-
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
diff --git a/secrets/.gitignore b/secrets/.gitignore
@@ -1,3 +0,0 @@
-*.key
-*.plaintext
-*.env
diff --git a/secrets/bin/age b/secrets/bin/age
Binary files differ.
diff --git a/secrets/bin/age-keygen b/secrets/bin/age-keygen
Binary files differ.
diff --git a/secrets/bin/edit_env.sh b/secrets/bin/edit_env.sh
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o pipefail
-
-main (){
- NAME=$1
- BIN=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
- SECRETS=$(echo "$BIN/..")
- KEY_FILE=$(echo "$SECRETS/$NAME.key")
- ENC_FILE=$(echo "$SECRETS/$NAME.enc")
- PLAINTEXT_FILE=$(echo "$SECRETS/$NAME.plaintext")
- export SOPS_AGE_KEY_FILE=$KEY_FILE
- echo "BIN = $BIN"
- echo "SECRETS = $SECRETS"
- echo "KEY = $KEY_FILE"
- echo "SOPS KEY = $SOPS_AGE_KEY_FILE"
- echo "ENC = $ENC_FILE"
- echo "PLAIN = $PLAINTEXT_FILE"
- PATH="$BIN:$PATH"
- if [[ -f "$ENC_FILE" ]]; then
- sops --decrypt --input-type dotenv --output-type dotenv "$ENC_FILE" > "$PLAINTEXT_FILE"
- fi
- ${EDITOR:-nano} "$PLAINTEXT_FILE"
- sops --input-type dotenv --output-type dotenv --encrypt --age $(age-keygen -y "$KEY_FILE") "$PLAINTEXT_FILE" > "$ENC_FILE"
- rm "$PLAINTEXT_FILE"
-}
-(main $1)
diff --git a/secrets/bin/set_env.sh b/secrets/bin/set_env.sh
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-BIN=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-SECRETS=$(echo "$BIN/..")
-NAME=$1
-PATH="$BIN:$PATH"
-export $(SOPS_AGE_KEY_FILE=$SECRETS/$NAME.key sops --decrypt --input-type dotenv --output-type dotenv $SECRETS/$NAME.enc | xargs)
diff --git a/secrets/bin/sops b/secrets/bin/sops
Binary files differ.
diff --git a/secrets/ci.enc b/secrets/ci.enc
@@ -1,12 +0,0 @@
-JAYPORE_DOCKER_USER=ENC[AES256_GCM,data:/gSnrYanww==,iv:YbbR8Qm9iTPXzGHGcFZc4xPXtY6xevMKvfEk6eJlu8U=,tag:CLDG3dfo1PyODydiOB6nVQ==,type:str]
-JAYPORE_DOCKER_PWD=ENC[AES256_GCM,data:DyWhzveKJpqYzjLmB64zDKGi6JgV1QUgOji51xBgv/NKY4fa,iv:jQBAu0nxL2TebfXXSp9VcQdaklKg0mDbTDcuVXKs7Io=,tag:zLLr5yLyguvdvYfb9XlvpA==,type:str]
-JAYPORE_NETLIFY_TOKEN=ENC[AES256_GCM,data:hhWA9DznqJoKafoWn3gcer/aDUrl+IMItpnHH6kC27bT9gbV3cBAgXxvmQ==,iv:2t74vUHDZuJ+hFGY7reo8g0zoW/2HCJwgx4HNWfeI74=,tag:avaxuRztESg0IDGpNyE2pw==,type:str]
-JAYPORE_NETLIFY_SITEID=ENC[AES256_GCM,data:XkpP7xVfkm+Lg8F0YS8pIxbCWRYzAfPPq/0mobmPARcF5pas,iv:7O+W0MOvkL/tPZ0L0EAKV83wZcIkvwRicAvXWnaygfA=,tag:/vZc0tSEnXS5ryOzAVrLlw==,type:str]
-JAYPORE_PYPI_TOKEN=ENC[AES256_GCM,data:Wzu3wIfwNnr+8n+fUhKSRGQqxFhCy0lDkuF03rBC/1BPjDkhxcFWnPSi7Vn3KCukO6UcfV3Mwc/e2AaEGqhsYx1t9dLU732VcKADtrnksBtqmVDeFKyipAQX8oveIPVj6CmhA1T6UHH8DrEgDhhJ4Sh4NvXg+zRsgD0LztgNdkLW1UaCqAfP7qeCdgLhdKYijkPspcU+36H6pKaQSXg1zbIHAkjS2L89JbmWZ0Y4jRitxkJx9rdm2lsOGXHtlHYWQBU/OGg50At5tFH7qoIE6ks=,iv:f+Q5GB9+FfGBH9XnphAe28sHaVrsW5fIYIwRnBQkUpo=,tag:k2alnsZl9vmffZYwsngKIg==,type:str]
-JAYPORE_GITEA_TOKEN=ENC[AES256_GCM,data:DSjy+nvSnWwGNAVed420XX+FoqfqECWGdrLsf7EEzDbZ/Cn2FtDLmQ==,iv:tHdNbhXD2gLThSaSX2VG5cOVJGrxZ12JQ9BC/L7ntDU=,tag:TBpGcFKnCed/lm9FDf0OgQ==,type:str]
-sops_mac=ENC[AES256_GCM,data:dyEnTlv/6w0NJ1zCTMoiitQXjEKMoXKJyadg1uyVNgw/AwsHx483a251nDuMcGAsKzetMzml2CVA9FeNP9n71CdHzyKYKAy1nSUKAhdg1MJyVhW93xnvfm5Wsq4R6KRTMU0ZWz2M1nixgPssRzOZb37VWMp8uVjdMfxG8Q+DsfY=,iv:6fuVGFk8gC+dFus+m4R03RyvRzA3/R+QF33QYA+EKxY=,tag:QUhvrMdr4WZjZ85Ttj6bqQ==,type:str]
-sops_unencrypted_suffix=_unencrypted
-sops_version=3.7.3
-sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAxSVlDWmk2RGFVeXJObDVW\nMmNTZ0E4OGpieXV4M3VITysrUis2YnpzRVVVCklFWTNNa3VkaFdMbDVOZGRHUHgy\nNnltNkJkZ0FmdkNMR2dmaEJVSWZoRzgKLS0tIDQ2clJoKzYxamh6NkxEOWRVd2hi\nVHE1am96M1BzT0RxeHd2V2xYSSt5OFkKQH/3MqOW+2vAJHeeERfpAc2o6i3xmQI1\nGKT1ghplWIPJIvnxmPEiqMfbzrN8Rl7kTtXQvjVAFQLU2VNBeE0prw==\n-----END AGE ENCRYPTED FILE-----\n
-sops_age__list_0__map_recipient=age1u0zd477nnqqausg4vtxl3laxz73t5tgt9qw8enmpeeadcmes3eusw3v3m9
-sops_lastmodified=2023-03-10T04:04:50Z
diff --git a/setup.sh b/setup.sh
@@ -1,151 +0,0 @@
-set -o errexit
-set -o nounset
-set -o pipefail
-
-RUNNING_IN_CI="${RUNNING_IN_CI:-no}"
-ASSUME_YES="no"
-while getopts ":y" opt; do
- case $opt in
- y)
- ASSUME_YES="yes"
- ;;
- esac
-done
-
-echo "RUNNING_IN_CI : $RUNNING_IN_CI"
-echo "ASSUME_YES : $ASSUME_YES"
-
-should_continue (){
- if [[ "$ASSUME_YES" = "yes" ]]
- then
- return 0
- fi
- # ---
- read -r -p "Continue? [Y/n] " response
- if [[ "$response" = "" ]]
- then
- return 0
- fi
- if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]
- then
- return 0
- fi
- return 1
-}
-
-getfile(){
- if [ "$RUNNING_IN_CI" = "yes" ]; then
- ROOT='/jaypore_ci/run'
- SOURCE=$(echo "$ROOT$1")
- if [ -f "$ROOT/cicd$1" ]; then
- SOURCE=$(echo "$ROOT/cicd$1")
- fi
- if [ -f "$ROOT/secrets$1" ]; then
- SOURCE=$(echo "$ROOT/secrets$1")
- fi
- echo "Getting file: $SOURCE $2"
- cp $SOURCE $2
- else
- wget --quiet -O $2 https://www.jayporeci.in$1
- fi
-}
-
-main (){
- REPO_ROOT=$(git rev-parse --show-toplevel)
- LOCAL_HOOK=$(echo $REPO_ROOT/.git/hooks/pre-push)
- CICD_ROOT=cicd
- echo "--------------------"
- echo "Installing JayporeCI"
- echo "--------------------"
- echo "Installing in repo: $REPO_ROOT"
- echo "Creating folder for cicd: $REPO_ROOT/$CICD_ROOT"
- # ----------------==
- if should_continue;
- then
- echo "Creating cicd.py and pre-push.sh"
- else
- exit 0
- fi
- mkdir $REPO_ROOT/$CICD_ROOT || echo 'Moving on..'
- cat > $REPO_ROOT/$CICD_ROOT/cicd.py << EOF
-from jaypore_ci import jci
-
-with jci.Pipeline() as p:
- p.job("Black", "black --check .")
-EOF
- getfile /pre-push.sh $REPO_ROOT/cicd/pre-push.sh
- getfile /Dockerfile $REPO_ROOT/cicd/Dockerfile
- chmod u+x $REPO_ROOT/cicd/pre-push.sh
- # ----------------==
- ENV_PREFIX=''
- echo "Creating 'secrets' folder for environment variables."
- if should_continue
- then
- mkdir -p secrets/bin
- PATH="$REPO_ROOT/secrets/bin:$PATH"
- BINLOC=$HOME/.local/jayporeci_bin
- echo "Downloading age/sops binaries to: $BINLOC"
- if should_continue
- then
- echo "Downloading age/ binaries"
- mkdir -p $BINLOC &> /dev/null
- getfile /bin/age $BINLOC/age &
- getfile /bin/age-keygen $BINLOC/age-keygen &
- getfile /bin/sops $BINLOC/sops &
- wait
- chmod u+x $BINLOC/age $BINLOC/age-keygen $BINLOC/sops
- fi
- echo "Adding line to .bashrc:"
- echo " \$PATH=$BINLOC:\$PATH"
- if should_continue
- then
- echo "export PATH=$BINLOC:\$PATH" >> $HOME/.bashrc
- source $HOME/.bashrc
- fi
- echo "Downloading edit/set env scripts"
- getfile /bin/edit_env.sh secrets/bin/edit_env.sh &
- getfile /bin/set_env.sh secrets/bin/set_env.sh &
- wait
- echo "Created $REPO_ROOT/secrets/bin"
- echo "Adding gitignore so that key and plaintext files are never committed"
- echo "*.key" >> .gitignore
- echo "*.plaintext" >> .gitignore
- echo "Creating new age-key at: $REPO_ROOT/secrets/ci.key"
- age-keygen > $REPO_ROOT/secrets/ci.key
- echo "You can now use (bash secrets/bin/edit_env.sh ci) to edit environment variables."
- echo "Editing secrets now"
- if [ "$RUNNING_IN_CI" = "yes" ]; then
- echo "Skip setting env file. Running in CI"
- else
- if should_continue
- then
- (bash $REPO_ROOT/secrets/bin/edit_env.sh ci)
- fi
- fi
- ENV_PREFIX='ENV=ci '
- fi
- # ----------------==
- echo "Creating git hook for pre-push"
- if test -f "$LOCAL_HOOK"; then
- if test -f "$LOCAL_HOOK.local"; then
- echo "$LOCAL_HOOK has already been moved once."
- echo $LOCAL_HOOK
- echo $LOCAL_HOOK.local
- echo "Please link"
- echo " Jaypore hook : $REPO_ROOT/cicd/pre-push.sh"
- echo "with"
- echo " Existing hook: $LOCAL_HOOK"
- echo "manually by editing the existing hook file"
- echo "--------------------------------------"
- echo "Stopping."
- exit 1
- else
- echo "$LOCAL_HOOK exists. Moving to separate file"
- mv $LOCAL_HOOK $REPO_ROOT/.git/hooks/pre-push.old
- echo "$REPO_ROOT/.git/hooks/pre-push.old" >> $REPO_ROOT/.git/hooks/pre-push
- fi
- fi
- echo "$ENV_PREFIX$REPO_ROOT/cicd/pre-push.sh hook" >> $REPO_ROOT/.git/hooks/pre-push
- chmod u+x $LOCAL_HOOK
-}
-(main)
diff --git a/tests/__init__.py b/tests/__init__.py
diff --git a/tests/conftest.py b/tests/conftest.py
@@ -1,90 +0,0 @@
-import os
-from pathlib import Path
-import unittest
-
-import pytest
-import tests.subprocess_mock # pylint: disable=unused-import
-import tests.docker_mock # pylint: disable=unused-import
-from tests.requests_mock import add_gitea_mocks, add_github_mocks, Mock
-import tests.jayporeci_patch # pylint: disable=unused-import
-
-from jaypore_ci import jci, executors, remotes, reporters, repos
-
-
-def idfn(x):
- name = []
- for _, item in sorted(x.items()):
- what, _, cls = str(item).replace(">", "").split(".")[-3:]
- name.append(".".join([what, cls]))
- return str(name)
-
-
-def factory(*, repo, remote, executor, reporter):
- "Return a new pipeline every time the builder function is called"
-
- def build():
- r = repo.from_env()
- return jci.Pipeline(
- poll_interval=0,
- repo=r,
- remote=remote.from_env(repo=r),
- executor=executor(),
- reporter=reporter(),
- )
-
- return build
-
-
-def set_env_keys():
- os.environ["JAYPORE_GITEA_TOKEN"] = "fake_gitea_token"
- os.environ["JAYPORE_GITHUB_TOKEN"] = "fake_github_token"
- os.environ["JAYPORE_EMAIL_ADDR"] = "fake@email.com"
- os.environ["JAYPORE_EMAIL_PASSWORD"] = "fake_email_password"
- os.environ["JAYPORE_EMAIL_TO"] = "fake.to@mymailmail.com"
-
-
-@pytest.fixture(
- scope="function",
- params=list(
- jci.Pipeline.env_matrix(
- reporter=[reporters.Text, reporters.Markdown],
- remote=[
- remotes.Mock,
- remotes.Email,
- remotes.GitRemote,
- remotes.Gitea,
- remotes.Github,
- ],
- repo=[repos.Git],
- executor=[executors.Docker],
- )
- ),
- ids=idfn,
-)
-def pipeline(request):
- set_env_keys()
- builder = factory(
- repo=request.param["repo"],
- remote=request.param["remote"],
- executor=request.param["executor"],
- reporter=request.param["reporter"],
- )
- if request.param["remote"] == remotes.Gitea and not Mock.gitea_added:
- add_gitea_mocks(builder().remote)
- if request.param["remote"] == remotes.Github and not Mock.github_added:
- add_github_mocks(builder().remote)
- if request.param["remote"] == remotes.Email:
- with unittest.mock.patch("smtplib.SMTP_SSL", autospec=True):
- yield builder
- else:
- yield builder
-
-
-@pytest.fixture(
- scope="function",
- params=list((Path(__name__) / "../docs/source/examples").resolve().glob("*.py")),
- ids=str,
-)
-def doc_example_filepath(request):
- set_env_keys()
- yield request.param
diff --git a/tests/docker_mock.py b/tests/docker_mock.py
@@ -1,109 +0,0 @@
-import random
-from collections import defaultdict
-
-import pendulum
-import docker
-
-
-def cid(short=False):
- n_chars = 12 if short else 64
- return "".join(random.sample("0123456789abcdef" * 10, n_chars))
-
-
-class Network:
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
-
- def remove(self, **_):
- pass
-
-
-class Networks:
- nets = {}
-
- def list(self, names):
- return list(filter(None, [self.nets.get(name) for name in names]))
-
- def create(self, **kwargs):
- name = kwargs.get("name")
- self.nets[name] = Network(**kwargs)
- return name
-
- def get(self, name):
- return self.nets[name]
-
-
-class Container:
- def __init__(self, **kwargs):
- self.id = cid()
- self.__dict__.update(kwargs)
- self.FinishedAt = "0001-01-01T00:00:00Z"
- self.ExitCode = 0
- self.attrs = {
- "State": {
- "StartedAt": getattr(self, "StartedAt", None),
- "FinishedAt": getattr(self, "FinishedAt", None),
- }
- }
-
- def logs(self):
- return b""
-
- def stop(self, **_):
- self.FinishedAt = str(pendulum.now())
- self.attrs["State"]["FinishedAt"] = self.FinishedAt
- self.ExitCode = 0
-
- def remove(self, **_):
- Containers.boxes.pop(self.id, None)
-
-
-class Containers:
- boxes = {}
-
- def get(self, container_id):
- return self.boxes[container_id]
-
- def run(self, **kwargs):
- kwargs["StartedAt"] = str(pendulum.now())
- c = Container(**kwargs)
- self.boxes[c.id] = c
- return c
-
- def list(self, **_):
- return list(Containers.boxes.values())
-
-
-class Docker:
- networks = Networks()
- containers = Containers()
-
-
-class APIClient:
- max_running = {}
- reported_running = defaultdict(int)
-
- def inspect_container(self, container_id):
- if container_id not in self.max_running:
- self.max_running[container_id] = random.choice(range(3, 11))
- self.reported_running[container_id] += 1
- is_running = (
- self.reported_running[container_id] <= self.max_running[container_id]
- )
- container = Containers.boxes[container_id]
- return {
- "State": {
- "Running": is_running,
- "ExitCode": container.ExitCode,
- "StartedAt": container.StartedAt,
- "FinishedAt": container.FinishedAt,
- }
- }
-
-
-def from_env():
- return Docker()
-
-
-docker.from_env = from_env
-docker.APIClient = APIClient
diff --git a/tests/jayporeci_patch.py b/tests/jayporeci_patch.py
@@ -1,8 +0,0 @@
-from jaypore_ci import jci
-
-
-def __get_pipe_id__(self):
- return f"fake_docker_container_id_{self.repo.sha}"
-
-
-jci.Pipeline.__get_pipe_id__ = __get_pipe_id__
diff --git a/tests/requests_mock.py b/tests/requests_mock.py
@@ -1,103 +0,0 @@
-import json
-
-from typing import NamedTuple
-from collections import defaultdict
-
-import requests
-
-
-class MockResponse(NamedTuple):
- status_code: int
- body: str
- content_type: str
-
- def json(self):
- return json.loads(self.body)
-
- @property
- def text(self):
- return self.body
-
-
-class Mock:
- registry = defaultdict(list)
- index = defaultdict(int)
- gitea_added = False
- github_added = False
-
- @classmethod
- def get(cls, url, status=200, body="", content_type="text/html"):
- cls.registry["get", url].append(
- MockResponse(status_code=status, body=body, content_type=content_type)
- )
-
- @classmethod
- def post(cls, url, status=200, body="", content_type="text/html"):
- cls.registry["post", url].append(
- MockResponse(status_code=status, body=body, content_type=content_type)
- )
-
- @classmethod
- def patch(cls, url, status=200, body="", content_type="text/html"):
- cls.registry["patch", url].append(
- MockResponse(status_code=status, body=body, content_type=content_type)
- )
-
- @classmethod
- def handle(cls, method):
- def handler(url, **_):
- options = cls.registry[method, url]
- index = cls.index[method, url]
- resp = options[index]
- cls.index[method, url] = (cls.index[method, url] + 1) % len(options)
- return resp
-
- return handler
-
-
-def add_gitea_mocks(gitea):
- ISSUE_ID = 1
- # --- create PR
- create_pr_url = f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/pulls"
- Mock.post(create_pr_url, body="", status=201)
- Mock.post(create_pr_url, body=f"issue_id:{ISSUE_ID}", status=409)
- # --- get existing body
- Mock.get(
- f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/pulls/{ISSUE_ID}",
- body=json.dumps({"body": "Previous body in PR description."}),
- content_type="application/json",
- )
- # --- update body
- Mock.patch(f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/pulls/{ISSUE_ID}")
- # --- set commit status
- Mock.post(f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/statuses/{gitea.sha}")
- Mock.gitea_added = True
-
-
-def add_github_mocks(github):
- ISSUE_ID = 1
- # --- create PR
- create_pr_url = f"{github.api}/repos/{github.owner}/{github.repo}/pulls"
- Mock.post(create_pr_url, body="", status=404)
- Mock.get(
- create_pr_url,
- body=json.dumps([{"number": ISSUE_ID}]),
- content_type="application/json",
- )
- Mock.post(create_pr_url, body=f"issue_id:{ISSUE_ID}", status=409)
- # --- get existing body
- Mock.get(
- f"{github.api}/repos/{github.owner}/{github.repo}/pulls/{ISSUE_ID}",
- body=json.dumps({"body": "Already existing body in PR description."}),
- content_type="application/json",
- )
- # --- update body
- Mock.patch(f"{github.api}/repos/{github.owner}/{github.repo}/pulls/{ISSUE_ID}")
- # --- set commit status
- Mock.post(f"{github.api}/repos/{github.owner}/{github.repo}/statuses/{github.sha}")
- Mock.github_added = True
-
-
-requests.get = Mock.handle("get")
-requests.post = Mock.handle("post")
-requests.patch = Mock.handle("patch")
diff --git a/tests/subprocess_mock.py b/tests/subprocess_mock.py
@@ -1,87 +0,0 @@
-import random
-import subprocess
-from typing import NamedTuple
-
-
-class ProcMock(NamedTuple):
- returncode: int
- stdout: str
-
-
-def sha():
- return hex(random.getrandbits(128))
-
-
-__rev_parse__ = sha()
-__hash_object__ = sha()
-__mktree = sha()
-__commit_tree = sha()
-__update_ref__ = sha()
-
-
-def check_output(cmd, **_):
- text = ""
- # repos.git
- if "git diff" in cmd:
- text = "some\nfiles\nthat\nwere\nchanged"
- elif "git remote -v" in cmd and "grep https" in cmd:
- text = "https://fake_remote.subprocessmock.com/fake_owner/fake_repo.git"
- elif "git branch" in cmd and "grep" in cmd:
- text = "subprocess_mock_fake_branch"
- elif "rev-parse HEAD" in cmd:
- text = __rev_parse__
- elif "git log -1" in cmd:
- text = "some_fake_git_commit_message\nfrom_subprocess_mock"
- # jci
- elif "cat /proc/self/cgroup" in cmd:
- text = "fake_pipe_id_from_subprocess_mock"
- # remotes.git
- elif "git hash-object" in cmd:
- text = __hash_object__
- elif "git mktree" in cmd:
- text = __mktree
- elif "git commit-tree" in cmd:
- text = __commit_tree
- elif "git update-ref" in cmd:
- text = __update_ref__
- return text.encode()
-
-
-networks = {}
-names = {}
-containers = {}
-
-
-def cid(short=False):
- n_chars = 12 if short else 64
- return random.sample("0123456789abcdef" * 10, n_chars)
-
-
-def run(cmd, **_):
- code, text = 0, ""
- if "docker network create" in cmd:
- name = cmd.split()[-1]
- networks[name] = True
- elif "docker network ls" in cmd:
- name = cmd.split("grep")[1]
- if name in networks:
- text = f"{cid(short=True)} {name} bridge local"
- else:
- code = 1
- elif "docker network rm" in cmd:
- name = text = cmd.split(" rm ")[1].split("|")[0].strip()
- if name not in networks:
- text = "No such net"
- elif "docker stop -t 1" in cmd:
- name = text = cmd.split()[-1]
- if name not in containers and name not in names:
- cmd = 1
- text = f"Error response from daemon: No such container: {name}"
- elif "docker run -d" in cmd:
- name = cmd.split("--name")[1].strip().split()[0]
- containers[name] = text = cid()
- return ProcMock(returncode=code, stdout=text.encode())
-
-
-subprocess.check_output = check_output
-subprocess.run = run
diff --git a/tests/test_doc_examples.py b/tests/test_doc_examples.py
@@ -1,9 +0,0 @@
-from jaypore_ci.jci import Pipeline
-
-
-def test_doc_examples(doc_example_filepath):
- with open(doc_example_filepath, "r", encoding="utf-8") as fl:
- code = fl.read()
- Pipeline.__run_on_exit__ = False
- exec(code) # pylint: disable=exec-used
- Pipeline.__run_on_exit__ = True
diff --git a/tests/test_hypo_jci.py b/tests/test_hypo_jci.py
@@ -1,11 +0,0 @@
-from hypothesis import given, strategies as st, settings, HealthCheck
-
-from jaypore_ci.clean import allowed_alphabet
-
-
-@given(st.text(alphabet=allowed_alphabet, min_size=1))
-@settings(suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=500)
-def test_hypo_jobs(pipeline, name):
- pipeline = pipeline()
- with pipeline as p:
- p.job(name, name)
diff --git a/tests/test_jaypore_ci.py b/tests/test_jaypore_ci.py
@@ -1,102 +0,0 @@
-import pytest
-from jaypore_ci.changelog import version_map
-from jaypore_ci.config import const
-
-
-def test_sanity():
- assert 4 == 2 + 2
-
-
-def test_version_has_entry_in_version_map():
- assert const.version in version_map, const
-
-
-def test_simple_linear_jobs(pipeline):
- pipeline = pipeline()
- with pipeline as p:
- p.job("lint", "x")
- p.job("test", "x", depends_on=["lint"])
- order = pipeline.executor.get_execution_order()
- assert order["lint"] < order["test"]
-
-
-def test_no_duplicate_names(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- p.job("lint", "x")
- p.job("lint", "y")
-
-
-def test_dependency_has_to_be_defined_before_child(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- p.job("x", "x", depends_on=["y"])
- p.job("y", "y")
-
-
-def test_dependency_cannot_cross_stages(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- with p.stage("stage1"):
- p.job("y", "y")
- with p.stage("stage2"):
- p.job("x", "x", depends_on=["y"])
-
-
-def test_duplicate_stages_not_allowed(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- with p.stage("stage1"):
- p.job("x", "x")
- with p.stage("stage1"):
- p.job("y", "y")
-
-
-def test_stage_and_job_cannot_have_same_name(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- with p.stage("x"):
- p.job("x", "x")
-
-
-def test_cannot_define_duplicate_jobs(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- p.job("x", "x")
- p.job("x", "x")
-
-
-def test_non_service_jobs_must_have_commands(pipeline):
- pipeline = pipeline()
- with pytest.raises(AssertionError):
- with pipeline as p:
- p.job("x", None)
-
-
-def test_call_chain_is_followed(pipeline):
- pipeline = pipeline()
- with pipeline as p:
- for name in "pq":
- p.job(name, name)
- p.job("x", "x")
- p.job("y", "y", depends_on=["x"])
- p.job("z", "z", depends_on=["y"])
- for name in "ab":
- p.job(name, name)
- order = pipeline.executor.get_execution_order()
- # assert order == {}
- assert order["x"] < order["y"] < order["z"]
-
-
-def test_env_matrix_is_easy_to_make(pipeline):
- pipeline = pipeline()
- with pipeline as p:
- for i, env in enumerate(p.env_matrix(A=[1, 2, 3], B=[5, 6, 7])):
- p.job(f"job{i}", "fake command", env=env)
- assert len(pipeline.jobs) == 9