commit 2f757d5a8eb294fcf66411a3e09b8f7480ff9f73
parent 006d9c9cb339dc7c2647f406c076ea350e35cc32
Author: arjoonn <arjoonn@noreply.localhost>
Date: Tue, 21 Feb 2023 20:11:21 +0000
Add git remote (!35)
Branch auto created by JayporeCI
```jayporeci
╔ 🟢 : JayporeCI [sha 8d61724c26]
┏━ build_and_test
┃
┃ 🟢 : JciEnv [d731d93e] 0:39
┃ 🟢 : Jci [1039f856] 0: 9 ❮-- ['JciEnv']
┃ 🟢 : black [9b568107] 0: 0 ❮-- ['JciEnv']
┃ 🟢 : pylint [eb7b0dbc] 0: 7 ❮-- ['JciEnv']
┃ 🟢 : pytest [77021b9f] 1:12 81% ❮-- ['JciEnv']
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
┏━ Publish
┃
┃ 🟢 : DockerHubJci [1f8fd84f] 1: 1
┃ 🟢 : DockerHubJcienv [5296360e] 1:16
┃ 🟢 : PublishDocs [32a5e3a3] 0:16
┃ 🟢 : PublishPypi [773266c7] 0: 6
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
```
Co-authored-by: arjoonn sharma <arjoonn@midpathsoftware.com>
Reviewed-on: https://gitea.midpathsoftware.com/midpath/jaypore_ci/pulls/35
Diffstat:
24 files changed, 713 insertions(+), 187 deletions(-)
diff --git a/.gitignore b/.gitignore
@@ -4,3 +4,4 @@
dist/
.hypothesis/
.coverage
+prof/
diff --git a/cicd/cicd.py b/cicd/cicd.py
@@ -1,6 +1,27 @@
from jaypore_ci import jci
+from typing import NamedTuple
+
+
+class Should(NamedTuple):
+ release: bool = False
+ lint: bool = False
+
+
+def parse_commit(repo):
+ """
+ Decide what all the commit is asking us to do.
+ """
+ config = {}
+ for line in repo.commit_message.lower().split("\n"):
+ line = line.strip().replace(" ", "")
+ if "jci:" in line:
+ _, key = line.split("jci:")
+ config[key] = True
+ return Should(**config)
+
with jci.Pipeline() as p:
+ should = parse_commit(p.repo)
jcienv = f"jcienv:{p.repo.sha}"
with p.stage("build_and_test"):
p.job("JciEnv", f"docker build --target jcienv -t jcienv:{p.repo.sha} .")
@@ -13,8 +34,12 @@ with jci.Pipeline() as p:
p.job("black", "python3 -m black --check .", **kwargs)
p.job("pylint", "python3 -m pylint jaypore_ci/ tests/", **kwargs)
p.job("pytest", "bash cicd/run_tests.sh", image=jcienv, depends_on=["JciEnv"])
- with p.stage("Publish", image=jcienv):
- p.job("DockerHubJcienv", "bash cicd/build_and_push_docker.sh jcienv")
- p.job("DockerHubJci", "bash cicd/build_and_push_docker.sh jci")
- p.job("PublishDocs", f"bash cicd/build_and_publish_docs.sh {p.remote.branch}")
- p.job("PublishPypi", "bash cicd/build_and_push_pypi.sh")
+
+ if should.release:
+ with p.stage("Publish", image=jcienv):
+ p.job("DockerHubJcienv", "bash cicd/build_and_push_docker.sh jcienv")
+ p.job("DockerHubJci", "bash cicd/build_and_push_docker.sh jci")
+ p.job(
+ "PublishDocs", f"bash cicd/build_and_publish_docs.sh {p.remote.branch}"
+ )
+ p.job("PublishPypi", "bash cicd/build_and_push_pypi.sh")
diff --git a/cicd/run_tests.sh b/cicd/run_tests.sh
@@ -6,7 +6,7 @@ set -o pipefail
main() {
- python -m coverage run --source=. -m pytest -vv
+ python -m coverage run --branch --source=. -m pytest -vv
coverage html
coverage report
echo "$(coverage report --format=total)%" > "/jaypore_ci/run/pytest.txt"
diff --git a/docs/source/index.rst b/docs/source/index.rst
@@ -70,14 +70,14 @@ This would produce a CI report like::
┃ 🟢 : PyTest [28d4985f] 0:15 [Cov: 65% ]
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
-- `edcb193bae` is the SHA that the report is for.
-- `Pipeline` is the default pipeline stage.
+- **edcb193bae** is the SHA that the report is for.
+- **Pipeline** is the default pipeline stage.
- 🟢 indicates that the job has passed
-- `Black`, `Pylint`, and `PyTest` are the job names.
-- `[ffcda0a9]` is the docker container ID for that job.
-- `1: 3` is the time taken by the job.
-- `[Cov: 65% ]` is custom reporting done by the job. Any job can create a file
- `/jaypore_ci/run/<job name>.txt` and the first 5 characters from that file
+- **Black**, **Pylint**, and **PyTest** are the job names.
+- **[ffcda0a9]** is the docker container ID for that job.
+- **1: 3** is the time taken by the job.
+- **[Cov: 65% ]** is custom reporting done by the job. Any job can create a file
+ **/jaypore_ci/run/<job name>.txt** and the first 5 characters from that file
will be displayed in the report.
- Although this is used for coverage reports you could potentially use this for anything you want. A few ideas:
- You could report error codes here to indicate WHY a job failed.
@@ -124,7 +124,7 @@ Concepts
the :class:`~jaypore_ci.reporters.markdown.Markdown` reporter that uses
Mermaid graphs to show you pipeline dependencies.
4. :class:`~jaypore_ci.interfaces.Remote` is where the report is published to. Currently we have:
- - :class:`~jaypore_ci.remotes.git.Git` which can store the pipeline status
+ - :class:`~jaypore_ci.remotes.git.GitRemote` which can store the pipeline status
in git itself. You can then push the status to your github and share it
with others. This works similar to git-bug.
- :class:`~jaypore_ci.remotes.gitea.Gitea` can open a PR and publish pipeline status as the PR description on Gitea.
@@ -146,18 +146,18 @@ Concepts
- Jobs inherit keyword arguments from Pipelines, then stages, then whatever
is specified at the job level.
-Examples
-========
+How to
+======
-Job logs / debugging
---------------------
+See job logs
+------------
- The recommended way is to have a `Dozzle <https://dozzle.dev/>`_ container on your localhost to explore CI jobs.
-- To see logs you can run `docker logs <container ID>` locally.
-- To debug you can `docker exec <container ID>` while the job is running.
+- You can also run `docker logs <container ID>` locally.
+- To debug running containers you can `docker exec <container ID>` while the job is running.
-Dependencies in docker
-----------------------
+Build and publish docker images
+-------------------------------
Environment / package dependencies can be cached in docker easily. Simply build
your docker image and then run the job with that built image.
@@ -176,7 +176,7 @@ your docker image and then run the job with that built image.
)
-Complex job relations
+Define complex job relations
---------------------
This config builds docker images, runs linting, testing on the
@@ -188,53 +188,18 @@ codebase, then builds and publishes documentation.
from jaypore_ci import jci
with jci.Pipeline() as p:
- image = f"myproject_{p.repo.sha}"
with p.stage("build"):
- p.job("DockDev", f"docker build --target DevEnv -t {image}_dev .")
-
- with p.stage("checking", image=f"{image}_dev"):
- p.job("UnitTest", "python3 -m pytest -m unit tests/")
- p.job("PyLint", "python3 -m pylint src/")
- p.job("Black", "python3 -m black --check .")
- p.job(
- "IntegrationTest",
- "python3 -m pytest -m integration tests/",
- depends_on=["PyLint", "UnitTest"],
- )
- p.job(
- "RegressionTest",
- "python3 -m pytest -m regression tests/",
- depends_on=["PyLint", "UnitTest"],
- )
- p.job(
- "FuzzyTest",
- "python3 -m pytest -m fuzzy tests/",
- depends_on=["PyLint", "UnitTest"],
- )
-
- with p.stage("publish"):
- p.job("TagProd", f"docker tag -t {image}_prod hub/{image}_prod:{p.repo.sha}")
- p.job("TagDev", f"docker tag -t {image}_dev hub/{image}_dev:{p.repo.sha}")
- p.job(
- "PushProd",
- f"docker push hub/{image}_prod:{p.repo.sha}",
- depends_on=["TagProd"],
- )
- p.job(
- "PushDev",
- f"docker push hub/{image}_dev:{p.repo.sha}",
- depends_on=["TagDev"],
- )
- p.job(
- "BuildDocs",
- "sphinx-build -b html docs/source/ docs/build/html",
- image=f"{image}_dev"
- )
-
-
-Job matrix
-----------
+ p.job("DockDev", f"docker build --target DevEnv -t {p.repo.sha}_dev .")
+
+ with p.stage("checking", image=f"{p.repo.sha}_dev"):
+ p.job( "IntTest", "run int_test.sh")
+ p.job( "RegText", "bash regression_tests.sh", depends_on=["IntTest"])
+ p.job( "FuzzTest", "bash fuzzy_tests.sh", depends_on=["IntTest", "RegText"])
+
+
+Run a job matrix
+----------------
There is no special concept for matrix jobs. Just declare as many jobs as you
want in a while loop. There is a function to make this easier when you want to
@@ -256,8 +221,8 @@ run combinations of variables.
The above config generates 3 x 3 x 2 = 18 jobs and sets the environment for each to a unique combination of `BROWSER` , `SCREENSIZE`, and `ONLINE`.
-Cloud/remote runners
---------------------
+Run on cloud/remote runners
+---------------------------
- Make sure docker is installed on the remote machine.
- Make sure you have ssh access to remote machine and the user you are logging in as can run docker commands.
@@ -271,8 +236,8 @@ Cloud/remote runners
- Now in your `cicd/pre-push.sh` file, where the `docker run` command is mentioned, simply add `DOCKER_HOST=ssh://my.aws.machine`
- JayporeCi will then run on the remote machine.
-DB Services
------------
+Use custom services for testing
+-------------------------------
Some jobs don't affect the status of the pipeline. They just need to be there
while you are running your tests. For example, you might need a DB to run API
@@ -315,10 +280,10 @@ You can also import jobs defined by other people. Some examples of why you might
Since `JayporeCI` has a normal programming language as it's config language, most things can be solved without too much effort.
-Artifacts / Cache
------------------
+Publish Artifacts / Cache
+-------------------------
-- All jobs run in a shared directory `jaypore_ci/run`.
+- All jobs run in a shared directory **/jaypore_ci/run**.
- Anything you write to this directory is available to all jobs so you can use this to pass artifacts / cache between jobs.
- You can have a separate job to POST your artifacts to some remote location / git notes / S3 / gitea
@@ -389,6 +354,24 @@ variables you will have to supply to make this work.
with jci.Pipeline(repo=git, remote=email) as p:
p.job("x", "x")
+Run selected jobs based on commit message
+-----------------------------------------
+
+Sometimes we want to control when some jobs run. For example, build/release jobs, or intensive testing jobs.
+A simple way to do this is to read the commit message and see if the author
+asked us to run these jobs. JayporeCI itself only runs release jobs when the
+commit message contains **jci:release** as one of its lines.
+
+.. code-block:: python
+
+ from jaypore_ci import jci
+
+ with jci.Pipeline() as p:
+ p.job("build", "bash cicd/build.sh")
+ if "jci:release" in p.repo.commit_message:
+ p.job("release", "bash cicd/release.sh", depends_on=["build"])
+
+
Contributing
============
diff --git a/jaypore_ci/__init__.py b/jaypore_ci/__init__.py
@@ -1 +0,0 @@
-__version__ = "0.1.0"
diff --git a/jaypore_ci/interfaces.py b/jaypore_ci/interfaces.py
@@ -39,10 +39,11 @@ class Repo:
Contains information about the current VCS repo.
"""
- def __init__(self, sha: str, branch: str, remote: str):
+ def __init__(self, sha: str, branch: str, remote: str, commit_message: str):
self.sha: str = sha
self.branch: str = branch
self.remote: str = remote
+ self.commit_message: str = commit_message
def files_changed(self, target: str) -> List[str]:
"Returns list of files changed between current sha and target"
diff --git a/jaypore_ci/jci.py b/jaypore_ci/jci.py
@@ -37,13 +37,30 @@ class Job: # pylint: disable=too-many-instance-attributes
"""
This is the fundamental building block for running jobs.
Each job goes through a lifecycle defined by
- :class:`jaypore_ci.interfaces.Status`.
+ :class:`~jaypore_ci.interfaces.Status`.
A job is run by an :class:`~jaypore_ci.interfaces.Executor` as part of a
:class:`~jaypore_ci.jci.Pipeline`.
It is never created manually. The correct way to create a job is to use
:meth:`~jaypore_ci.jci.Pipeline.job`.
+
+ :param name: The name for the job. Names must be unique across jobs and stages.
+ :param command: The command that we need to run for the job. It can be set
+ to `None` when `is_service` is True.
+ :param is_service: Is this job a service or not? Service jobs are assumed
+ to be :class:`~jaypore_ci.interfaces.Status.PASSED` as long as they start.
+ They are shut down when the entire pipeline has finished executing.
+ :param pipeline: The pipeline this job is associated with.
+ :param status: The :class:`~jaypore_ci.interfaces.Status` of this job.
+ :param image: What docker image to use for this job.
+ :param timeout: Defines how long a job is allowed to run before being
+ killed and marked as class:`~jaypore_ci.interfaces.Status.FAILED`.
+ :param env: A dictionary of environment variables to pass to the docker run command.
+ :param children: Defines which jobs depend on this job's output status.
+ :param parents: Defines which jobs need to pass before this job can be run.
+ :param stage: What stage the job belongs to. This stage name must exist so
+ that we can assign jobs to it.
"""
def __init__(
@@ -112,7 +129,7 @@ class Job: # pylint: disable=too-many-instance-attributes
try:
self.pipeline.remote.publish(report, status)
except Exception as e: # pylint: disable=broad-except
- self.logging().exeception(e)
+ self.logging().exception(e)
return report
def trigger(self):
@@ -199,6 +216,13 @@ class Job: # pylint: disable=too-many-instance-attributes
class Pipeline: # pylint: disable=too-many-instance-attributes
"""
A pipeline acts as a controlling/organizing mechanism for multiple jobs.
+
+ :param repo : Provides information about the codebase.
+ :param reporter : Provides reports based on the state of the pipeline.
+ :param remote : Allows us to publish reports to somewhere like gitea/email.
+ :param executor : Runs the specified jobs.
+ :param poll_interval: Defines how frequently (in seconds) to check the
+ pipeline status and publish a report.
"""
def __init__( # pylint: disable=too-many-arguments
@@ -208,8 +232,7 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
remote: Remote = None,
executor: Executor = None,
reporter: Reporter = None,
- graph_direction: str = "TB",
- poll_interval: int = 1,
+ poll_interval: int = 10,
**kwargs,
) -> "Pipeline":
self.jobs = {}
@@ -223,7 +246,6 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
)
self.executor = executor if executor is not None else executors.docker.Docker()
self.reporter = reporter if reporter is not None else reporters.text.Text()
- self.graph_direction = graph_direction
self.poll_interval = poll_interval
self.stages = ["Pipeline"]
self.pipe_id = (
@@ -311,12 +333,10 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
**kwargs,
) -> Job:
"""
- Declare a job in this pipeline.
-
- Jobs inherit their keyword arguments from the stage they are defined in
- and the pipeline they are defined in.
-
- Initially jobs are in a `PENDING` state.
+ Creates a :class:`~jaypore_ci.jci.Job` instance based on the
+ pipeline/stage that it is being defined in. See
+ :class:`~jaypore_ci.jci.Job` for details on what parameters can be
+ passed to the job.
"""
depends_on = [] if depends_on is None else depends_on
depends_on = [depends_on] if isinstance(depends_on, str) else depends_on
@@ -347,7 +367,8 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
self.services.append(job)
return job
- def env_matrix(self, **kwargs):
+ @classmethod
+ def env_matrix(cls, **kwargs):
"""
Return a cartesian product of all the provided kwargs.
"""
diff --git a/jaypore_ci/remotes/__init__.py b/jaypore_ci/remotes/__init__.py
@@ -1,4 +1,5 @@
from .mock import Mock
+from .git import GitRemote
from .gitea import Gitea
from .github import Github
from .email import Email
diff --git a/jaypore_ci/remotes/email.py b/jaypore_ci/remotes/email.py
@@ -63,7 +63,9 @@ class Email(Remote): # pylint: disable=too-many-instance-attributes
addr=os.environ["JAYPORE_EMAIL_ADDR"],
password=os.environ["JAYPORE_EMAIL_PASSWORD"],
email_to=os.environ["JAYPORE_EMAIL_TO"],
- email_from=os.environ["JAYPORE_EMAIL_FROM"],
+ email_from=os.environ.get(
+ "JAYPORE_EMAIL_FROM", os.environ["JAYPORE_EMAIL_ADDR"]
+ ),
subject=f"JCI [{owner}/{name}] [{repo.branch} {repo.sha[:8]}]",
branch=repo.branch,
sha=repo.sha,
diff --git a/jaypore_ci/remotes/git.py b/jaypore_ci/remotes/git.py
@@ -0,0 +1,96 @@
+"""
+This is used to save the pipeline status to git itself.
+"""
+import time
+import subprocess
+
+from jaypore_ci.interfaces import Remote
+from jaypore_ci.repos import Git, Mock
+from jaypore_ci.logging import logger
+
+
+class GitRemote(Remote): # pylint: disable=too-many-instance-attributes
+ """
+ You can save pipeline status to git using this remote.
+
+ To push/fetch your local refs to a git remote you can run
+
+ .. code-block:: console
+
+ git fetch origin refs/jayporeci/*:refs/jayporeci/*
+ git push origin refs/jayporeci/*:refs/jayporeci/*
+ """
+
+ @classmethod
+ def from_env(cls, *, repo: Git) -> "GitRemote":
+ """
+ Creates a remote instance from the environment.
+ """
+ assert isinstance(repo, (Git, Mock)), "Git remote can only work in a git repo"
+ return cls(
+ repo=repo,
+ branch=repo.branch,
+ sha=repo.sha,
+ )
+
+ def __init__(self, *, repo, **kwargs):
+ super().__init__(**kwargs)
+ self.repo = repo
+
+ def logging(self):
+ """
+ Returns a logging instance with information about git bound to it.
+ """
+ return logger.bind(repo=self.repo)
+
+ def publish(self, report: str, status: str) -> None:
+ """
+ Will publish the report to the local git repo.
+
+ :param report: Report to write to remote.
+ :param status: One of ["pending", "success", "error", "failure",
+ "warning"] This is the dot next to each commit in gitea.
+ """
+ assert status in ("pending", "success", "error", "failure", "warning")
+ now = time.time()
+ lines = ""
+ git_blob_sha = subprocess.check_output(
+ "git hash-object -w --stdin",
+ input=report,
+ text=True,
+ stderr=subprocess.STDOUT,
+ shell=True,
+ ).strip()
+ lines += f"\n100644 blob {git_blob_sha}\t{now}.txt"
+ lines = lines.strip()
+ git_tree_sha = subprocess.run(
+ "git mktree",
+ input=lines,
+ text=True,
+ shell=True,
+ check=False,
+ stderr=subprocess.STDOUT,
+ stdout=subprocess.PIPE,
+ ).stdout.strip()
+ git_commit_sha = subprocess.run(
+ f"git commit-tree {git_tree_sha}",
+ text=True,
+ input=f"JayporeCI status: {now}",
+ shell=True,
+ check=False,
+ stderr=subprocess.STDOUT,
+ stdout=subprocess.PIPE,
+ )
+ assert git_commit_sha.returncode == 0
+ git_commit_sha = (
+ subprocess.check_output(
+ f"git update-ref refs/jayporeci/{self.repo.sha} {git_commit_sha.stdout.strip()}",
+ shell=True,
+ stderr=subprocess.STDOUT,
+ )
+ .decode()
+ .strip()
+ )
+ self.logging().info(
+ "Published status to local git: refs/jayporeci/{self.repo.sha} {git_commit_sha}"
+ )
diff --git a/jaypore_ci/remotes/gitea.py b/jaypore_ci/remotes/gitea.py
@@ -53,6 +53,8 @@ class Gitea(Remote): # pylint: disable=too-many-instance-attributes
self.token = token
self.timeout = 10
self.base_branch = "main"
+ # ---
+ self.__pr_id__ = None
def logging(self):
"""
@@ -66,36 +68,43 @@ class Gitea(Remote): # pylint: disable=too-many-instance-attributes
"""
Returns the pull request ID for the current branch.
"""
- r = requests.post(
- f"{self.api}/repos/{self.owner}/{self.repo}/pulls",
- params={"access_token": self.token},
- timeout=self.timeout,
- json={
- "base": self.base_branch,
- "body": "Branch auto created by JayporeCI",
- "head": self.branch,
- "title": self.branch,
- },
- )
- self.logging().debug("Get PR Id", status_code=r.status_code)
- if r.status_code == 409:
- return r.text.split("issue_id:")[1].split(",")[0].strip()
- if r.status_code == 201:
- return self.get_pr_id()
- if r.status_code == 404 and r.json()["message"] == "IsBranchExist":
- self.base_branch = "develop"
- return self.get_pr_id()
- self.logging().debug()(
- "Failed gitea api",
- api=self.api,
- owner=self.owner,
- repo=self.repo,
- token=self.token,
- branch=self.branch,
- status=r.status_code,
- response=r.text,
- )
- raise RemoteApiFailed(r)
+ if self.__pr_id__ is None:
+ r = requests.post(
+ f"{self.api}/repos/{self.owner}/{self.repo}/pulls",
+ params={"access_token": self.token},
+ timeout=self.timeout,
+ json={
+ "base": self.base_branch,
+ "body": "Branch auto created by JayporeCI",
+ "head": self.branch,
+ "title": self.branch,
+ },
+ )
+ self.logging().debug("Get PR Id", status_code=r.status_code)
+ if r.status_code == 409:
+ self.__pr_id__ = r.text.split("issue_id:")[1].split(",")[0].strip()
+ return self.get_pr_id()
+ if r.status_code == 201:
+ return self.get_pr_id()
+ if (
+ r.status_code == 404
+ and r.json()["message"] == "IsBranchExist"
+ and self.base_branch != "develop"
+ ):
+ self.base_branch = "develop"
+ return self.get_pr_id()
+ self.logging().debug()(
+ "Failed gitea api",
+ api=self.api,
+ owner=self.owner,
+ repo=self.repo,
+ token=self.token,
+ branch=self.branch,
+ status=r.status_code,
+ response=r.text,
+ )
+ raise RemoteApiFailed(r)
+ return self.__pr_id__
def publish(self, report: str, status: str):
"""
diff --git a/jaypore_ci/remotes/github.py b/jaypore_ci/remotes/github.py
@@ -41,7 +41,7 @@ class Github(Remote): # pylint: disable=too-many-instance-attributes
os.environ["JAYPORE_COMMIT_SHA"] = repo.sha
return cls(
root="https://api.github.com",
- owner=Path(repo.remote.path).parts[1],
+ owner=Path(remote.path).parts[1],
repo=Path(remote.path).parts[2].replace(".git", ""),
branch=repo.branch,
token=os.environ["JAYPORE_GITHUB_TOKEN"],
diff --git a/jaypore_ci/remotes/mock.py b/jaypore_ci/remotes/mock.py
@@ -1,11 +1,8 @@
"""
-A gitea remote git host.
+A mock remote.
-This is used to report pipeline status to the remote.
+This is used to test pipelines.
"""
-import os
-
-
from jaypore_ci.interfaces import Remote, Repo
from jaypore_ci.logging import logger
@@ -17,7 +14,7 @@ class Mock(Remote): # pylint: disable=too-many-instance-attributes
@classmethod
def from_env(cls, *, repo: Repo):
- return cls(branch=os.environ["JAYPORE_BRANCH"], sha=os.environ["JAYPORE_SHA"])
+ return cls(branch=repo.branch, sha=repo.sha)
def logging(self):
"""
diff --git a/jaypore_ci/reporters/markdown.py b/jaypore_ci/reporters/markdown.py
@@ -11,6 +11,10 @@ def __node_mod__(nodes):
class Markdown(Reporter):
+ def __init__(self, *, graph_direction: str = "TD", **kwargs):
+ super().__init__(**kwargs)
+ self.graph_direction = graph_direction
+
def render(self, pipeline):
"""
Returns a markdown report for a given pipeline.
@@ -40,7 +44,7 @@ class Markdown(Reporter):
}
mermaid = f"""
```mermaid
-flowchart {pipeline.graph_direction}
+flowchart {self.graph_direction}
"""
for stage in pipeline.stages:
nodes, edges = set(), set()
@@ -51,7 +55,7 @@ flowchart {pipeline.graph_direction}
edges |= {(p, job.name) for p in job.parents}
mermaid += f"""
subgraph {stage}
- direction {pipeline.graph_direction}
+ direction {self.graph_direction}
"""
ref = {n: f"{stage}_{i}" for i, n in enumerate(nodes)}
# If there are too many nodes, scatter them with different length arrows
diff --git a/jaypore_ci/repos/git.py b/jaypore_ci/repos/git.py
@@ -28,7 +28,7 @@ class Git(Repo):
.decode()
.strip()
)
- assert "https://" in remote, "Only https remotes supported"
+ assert "https://" in remote, f"Only https remotes supported: {remote}"
assert ".git" in remote
# NOTE: Later on perhaps we should support non-https remotes as well
# since JCI does not actually do anything with the remote.
@@ -40,4 +40,9 @@ class Git(Repo):
.strip()
)
sha = subprocess.check_output("git rev-parse HEAD", shell=True).decode().strip()
- return Repo(sha=sha, branch=branch, remote=remote)
+ message = (
+ subprocess.check_output("git log -1 --pretty=%B", shell=True)
+ .decode()
+ .strip()
+ )
+ return cls(sha=sha, branch=branch, remote=remote, commit_message=message)
diff --git a/jaypore_ci/repos/mock.py b/jaypore_ci/repos/mock.py
@@ -15,6 +15,6 @@ class Mock(Repo):
@classmethod
def from_env(cls, **kwargs) -> "Mock":
"""
- Gets repo status from the environment and git repo on disk.
+ Save whatever is provided to kwargs
"""
return cls(**kwargs)
diff --git a/poetry.lock b/poetry.lock
@@ -14,14 +14,14 @@ files = [
[[package]]
name = "astroid"
-version = "2.14.1"
+version = "2.14.2"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
- {file = "astroid-2.14.1-py3-none-any.whl", hash = "sha256:23c718921acab5f08cbbbe9293967f1f8fec40c336d19cd75dc12a9ea31d2eb2"},
- {file = "astroid-2.14.1.tar.gz", hash = "sha256:bd1aa4f9915c98e8aaebcd4e71930154d4e8c9aaf05d35ac0a63d1956091ae3f"},
+ {file = "astroid-2.14.2-py3-none-any.whl", hash = "sha256:0e0e3709d64fbffd3037e4ff403580550f14471fd3eaae9fa11cc9a5c7901153"},
+ {file = "astroid-2.14.2.tar.gz", hash = "sha256:a3cf9f02c53dd259144a7e8f3ccd75d67c9a8c716ef183e0c1f291bc5d7bb3cf"},
]
[package.dependencies]
@@ -116,19 +116,102 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "2.1.1"
+version = "3.0.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
-python-versions = ">=3.6.0"
+python-versions = "*"
files = [
- {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
- {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+ {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"},
+ {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"},
]
-[package.extras]
-unicode-backport = ["unicodedata2"]
-
[[package]]
name = "click"
version = "8.1.3"
@@ -263,6 +346,51 @@ files = [
test = ["pytest (>=6)"]
[[package]]
+name = "gprof2dot"
+version = "2022.7.29"
+description = "Generate a dot graph from the output of several profilers."
+category = "dev"
+optional = false
+python-versions = ">=2.7"
+files = [
+ {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"},
+ {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"},
+]
+
+[[package]]
+name = "hypothesis"
+version = "6.68.2"
+description = "A library for property-based testing"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "hypothesis-6.68.2-py3-none-any.whl", hash = "sha256:2a41cc766cde52705895e54547374af89c617e8ec7bc4186cb7f03884a667d4e"},
+ {file = "hypothesis-6.68.2.tar.gz", hash = "sha256:a7eb2b0c9a18560d8197fe35047ceb58e7e8ab7623a3e5a82613f6a2cd71cffa"},
+]
+
+[package.dependencies]
+attrs = ">=19.2.0"
+exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+sortedcontainers = ">=2.1.0,<3.0.0"
+
+[package.extras]
+all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "importlib-metadata (>=3.6)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.9.0)", "pandas (>=1.0)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2022.7)"]
+cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"]
+codemods = ["libcst (>=0.3.16)"]
+dateutil = ["python-dateutil (>=1.4)"]
+django = ["django (>=3.2)"]
+dpcontracts = ["dpcontracts (>=0.4)"]
+ghostwriter = ["black (>=19.10b0)"]
+lark = ["lark (>=0.10.1)"]
+numpy = ["numpy (>=1.9.0)"]
+pandas = ["pandas (>=1.0)"]
+pytest = ["pytest (>=4.6)"]
+pytz = ["pytz (>=2014.1)"]
+redis = ["redis (>=3.0.0)"]
+zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2022.7)"]
+
+[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
@@ -288,21 +416,21 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "4.13.0"
+version = "6.0.0"
description = "Read metadata from Python packages"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
- {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
+ {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"},
+ {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
@@ -511,14 +639,14 @@ files = [
[[package]]
name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.5"
files = [
- {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
- {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
@@ -582,19 +710,19 @@ pytzdata = ">=2020.1"
[[package]]
name = "platformdirs"
-version = "2.6.2"
+version = "3.0.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
- {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
+ {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"},
+ {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"},
]
[package.extras]
-docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "pluggy"
@@ -629,18 +757,18 @@ plugins = ["importlib-metadata"]
[[package]]
name = "pylint"
-version = "2.16.0"
+version = "2.16.2"
description = "python code static checker"
category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
- {file = "pylint-2.16.0-py3-none-any.whl", hash = "sha256:55e5cf00601c4cfe2e9404355c743a14e63be85df7409da7e482ebde5f9f14a1"},
- {file = "pylint-2.16.0.tar.gz", hash = "sha256:43ee36c9b690507ef9429ce1802bdc4dcde49454c3d665e39c23791567019c0a"},
+ {file = "pylint-2.16.2-py3-none-any.whl", hash = "sha256:ff22dde9c2128cd257c145cfd51adeff0be7df4d80d669055f24a962b351bbe4"},
+ {file = "pylint-2.16.2.tar.gz", hash = "sha256:13b2c805a404a9bf57d002cd5f054ca4d40b0b87542bdaba5e05321ae8262c84"},
]
[package.dependencies]
-astroid = ">=2.14.1,<=2.16.0-dev0"
+astroid = ">=2.14.2,<=2.16.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
@@ -682,6 +810,26 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
+name = "pytest-profiling"
+version = "1.7.0"
+description = "Profiling plugin for py.test"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29"},
+ {file = "pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"},
+]
+
+[package.dependencies]
+gprof2dot = "*"
+pytest = "*"
+six = "*"
+
+[package.extras]
+tests = ["pytest-virtualenv"]
+
+[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
@@ -787,6 +935,18 @@ files = [
]
[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
+
+[[package]]
name = "sphinx"
version = "5.3.0"
description = "Python documentation generator"
@@ -961,14 +1121,14 @@ files = [
[[package]]
name = "typing-extensions"
-version = "4.4.0"
+version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
- {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
+ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
]
[[package]]
@@ -1064,14 +1224,14 @@ files = [
[[package]]
name = "zipp"
-version = "3.12.0"
+version = "3.14.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "zipp-3.12.0-py3-none-any.whl", hash = "sha256:9eb0a4c5feab9b08871db0d672745b53450d7f26992fd1e4653aa43345e97b86"},
- {file = "zipp-3.12.0.tar.gz", hash = "sha256:73efd63936398aac78fd92b6f4865190119d6c91b531532e798977ea8dd402eb"},
+ {file = "zipp-3.14.0-py3-none-any.whl", hash = "sha256:188834565033387710d046e3fe96acfc9b5e86cbca7f39ff69cf21a4128198b7"},
+ {file = "zipp-3.14.0.tar.gz", hash = "sha256:9e5421e176ef5ab4c0ad896624e87a7b2f07aca746c9b2aa305952800cb8eecb"},
]
[package.extras]
@@ -1081,4 +1241,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "73b0393d0d7ce869edd371aad92fefdddb2b997dd4660661e767b0502222ac84"
+content-hash = "9e58aabf16fd54e3083364bca0198366e2130799e0788894b56796eaf8554a39"
diff --git a/pyproject.toml b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "jaypore_ci"
-version = "0.2.10"
+version = "0.2.11"
description = ""
authors = ["arjoonn sharma <arjoonn.94@gmail.com>"]
@@ -18,6 +18,8 @@ pytest = "^7.2.0"
sphinx = "^5.3.0"
rich = "^13.2.0"
coverage = "^7.0.5"
+pytest-profiling = "^1.7.0"
+hypothesis = "^6.68.2"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/script.sh b/script.sh
@@ -0,0 +1,44 @@
+#! /bin/bash
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+
+# TODO: We have to update TUI so that it reads job logs from git instead of just docker.
+# TODO: JCI logging should commit things to git after it is complete.
+# TODO: JCI should offer capability to git-push/fetch job logs.
+
+main(){
+ SHA=e827f9a0a6
+ rm /tmp/tree.txt || echo "No such file exists"
+
+ # Run through a list of docker container IDs and names
+ for PAIR in "b6326575 JayporeCI" "200c5b71 JciEnv" "021ffe61 Jci" "f9f3b7b4 black" "66497172 pylint" "6bc7eb99 pytest" "11fb552c DockerHubJci" "f1cbbd4b DockerHubJcienv" "e99ec6e0 PublishDocs" "57cf28ee PublishPypi"
+ do
+ set -- $PAIR
+ CID=$1
+ NAME=$2
+ # --- Create blobs from the logs
+ GIT_BLOB_SHA=$(docker logs $CID 2>&1 | git hash-object -w --stdin)
+ echo $GIT_BLOB_SHA $NAME
+ # Accumulate them to a file to create a tree later on
+ echo -e "100644 blob $GIT_BLOB_SHA\t$NAME.txt" >> /tmp/tree.txt
+ done
+
+ # Create a tree
+ GIT_TREE_SHA=$(cat /tmp/tree.txt | git mktree)
+ echo "GIT_WRITE_TREE: $GIT_TREE_SHA"
+
+ # Commit that tree
+ # TODO: This part requires us to set identity. How should we handle this? :thinking:
+ GIT_COMMIT_SHA=$(echo 'Jaypore CI logs' | git commit-tree $GIT_TREE_SHA)
+ echo "COMMIT_SHA: $GIT_COMMIT_SHA"
+
+ # Update the refs so that the provided SHA will point to this tree
+ git update-ref refs/jayporeci/$SHA $GIT_COMMIT_SHA
+ git push origin refs/jayporeci/*:refs/jayporeci/*
+ git fetch origin refs/jayporeci/*:refs/jayporeci/*
+}
+
+(main)
diff --git a/setup.sh b/setup.sh
@@ -12,7 +12,7 @@ main (){
echo "--------------------"
echo "Installing in repo: $REPO_ROOT"
echo "Creating folder for cicd: $REPO_ROOT/$CICD_ROOT"
- # ----------------<<<<<<<<<<<<<
+ # ----------------==
read -r -p "Should we continue? [Y/n] " response
if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]
then
@@ -29,7 +29,7 @@ with jci.Pipeline() as p:
EOF
curl -s https://www.jayporeci.in/pre-push.sh -o $REPO_ROOT/cicd/pre-push.sh
chmod u+x $REPO_ROOT/cicd/pre-push.sh
- # ----------------<<<<<<<<<<<<<
+ # ----------------==
ENV_PREFIX=''
read -r -p "Do you want to create 'secrets' folder for environment variables? [Y/n] " response
if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]
@@ -63,7 +63,7 @@ EOF
fi
ENV_PREFIX='ENV=ci '
fi
- # ----------------<<<<<<<<<<<<<
+ # ----------------==
echo "Creating git hook for pre-push"
if test -f "$LOCAL_HOOK"; then
if test -f "$LOCAL_HOOK.local"; then
diff --git a/tests/conftest.py b/tests/conftest.py
@@ -1,20 +1,111 @@
+import os
+import json
+import unittest.mock
+
import pytest
+import tests.subprocess_mock # pylint: disable=unused-import
+from tests.requests_mock import Mock
from jaypore_ci import jci, executors, remotes, reporters, repos
+def add_gitea_mocks(gitea):
+ ISSUE_ID = 1
+ # --- create PR
+ create_pr_url = f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/pulls"
+ Mock.post(create_pr_url, body="", status=201)
+    Mock.post(create_pr_url, body=f"issue_id:{ISSUE_ID}", status=409)
+ # --- get existing body
+ Mock.get(
+ f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/pulls/{ISSUE_ID}",
+ body=json.dumps({"body": "Previous body in PR description."}),
+ content_type="application/json",
+ )
+ # --- update body
+ Mock.patch(f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/pulls/{ISSUE_ID}")
+ # --- set commit status
+ Mock.post(f"{gitea.api}/repos/{gitea.owner}/{gitea.repo}/statuses/{gitea.sha}")
+ Mock.gitea_added = True
+
+
+def add_github_mocks(github):
+ ISSUE_ID = 1
+ # --- create PR
+ create_pr_url = f"{github.api}/repos/{github.owner}/{github.repo}/pulls"
+ Mock.post(create_pr_url, body="", status=404)
+ Mock.get(
+ create_pr_url,
+ body=json.dumps([{"number": ISSUE_ID}]),
+ content_type="application/json",
+ )
+    Mock.post(create_pr_url, body=f"issue_id:{ISSUE_ID}", status=409)
+ # --- get existing body
+ Mock.get(
+ f"{github.api}/repos/{github.owner}/{github.repo}/pulls/{ISSUE_ID}",
+ body=json.dumps({"body": "Already existing body in PR description."}),
+ content_type="application/json",
+ )
+ # --- update body
+ Mock.patch(f"{github.api}/repos/{github.owner}/{github.repo}/pulls/{ISSUE_ID}")
+ # --- set commit status
+ Mock.post(f"{github.api}/repos/{github.owner}/{github.repo}/statuses/{github.sha}")
+ Mock.github_added = True
+
+
+def idfn(x):
+ name = []
+ for _, item in sorted(x.items()):
+ what, _, cls = str(item).replace(">", "").split(".")[-3:]
+ name.append(".".join([what, cls]))
+ return str(name)
+
+
@pytest.fixture(
- scope="function", params=[reporters.Text, reporters.Mock, reporters.Markdown]
+ scope="function",
+ params=list(
+ jci.Pipeline.env_matrix(
+ reporter=[reporters.Text, reporters.Mock, reporters.Markdown],
+ remote=[
+ remotes.Mock,
+ remotes.Email,
+ remotes.GitRemote,
+ remotes.Gitea,
+ remotes.Github,
+ ],
+ repo=[repos.Mock, repos.Git],
+ executor=[executors.Mock],
+ )
+ ),
+ ids=idfn,
)
def pipeline(request):
- repo = repos.Mock.from_env(
- files_changed=[], branch="test_branch", sha="fake_sha", remote="fake_remote"
- )
- executor = executors.Mock()
- remote = remotes.Mock(branch=repo.branch, sha=repo.sha)
- reporter = request.param()
- p = jci.Pipeline(
- repo=repo, executor=executor, remote=remote, reporter=reporter, poll_interval=0
- )
- p.render_report = lambda: ""
- yield p
+ os.environ["JAYPORE_GITEA_TOKEN"] = "fake_gitea_token"
+ os.environ["JAYPORE_GITHUB_TOKEN"] = "fake_github_token"
+ os.environ["JAYPORE_EMAIL_ADDR"] = "fake@email.com"
+ os.environ["JAYPORE_EMAIL_PASSWORD"] = "fake_email_password"
+ os.environ["JAYPORE_EMAIL_TO"] = "fake.to@mymailmail.com"
+ kwargs = {}
+ if request.param["repo"] == repos.Mock:
+ kwargs["repo"] = repos.Mock.from_env(
+ files_changed=[],
+ branch="test_branch",
+ sha="fake_sha",
+ remote="https://fake_remote.com/fake_owner/fake_repo.git",
+ commit_message="fake_commit_message",
+ )
+ else:
+ kwargs["repo"] = request.param["repo"].from_env()
+ # --- remote
+ kwargs["remote"] = request.param["remote"].from_env(repo=kwargs["repo"])
+ if request.param["remote"] == remotes.Gitea and not Mock.gitea_added:
+ add_gitea_mocks(kwargs["remote"])
+ if request.param["remote"] == remotes.Github and not Mock.github_added:
+ add_github_mocks(kwargs["remote"])
+ kwargs["executor"] = request.param["executor"]()
+ kwargs["reporter"] = request.param["reporter"]()
+ p = jci.Pipeline(poll_interval=0, **kwargs)
+ if request.param["remote"] == remotes.Email:
+ with unittest.mock.patch("smtplib.SMTP_SSL", autospec=True):
+ yield p
+ else:
+ yield p
diff --git a/tests/requests_mock.py b/tests/requests_mock.py
@@ -0,0 +1,50 @@
+from typing import NamedTuple
+from collections import defaultdict
+
+import requests
+
+
+class MockResponse(NamedTuple):
+ status_code: int
+ body: str
+ content_type: str
+
+
+class Mock:
+ registry = defaultdict(list)
+ index = defaultdict(int)
+ gitea_added = False
+ github_added = False
+
+ @classmethod
+ def get(cls, url, status=200, body="", content_type="text/html"):
+ cls.registry["get", url].append(
+ MockResponse(status_code=status, body=body, content_type=content_type)
+ )
+
+ @classmethod
+ def post(cls, url, status=200, body="", content_type="text/html"):
+ cls.registry["post", url].append(
+ MockResponse(status_code=status, body=body, content_type=content_type)
+ )
+
+ @classmethod
+ def patch(cls, url, status=200, body="", content_type="text/html"):
+ cls.registry["patch", url].append(
+ MockResponse(status_code=status, body=body, content_type=content_type)
+ )
+
+ @classmethod
+ def handle(cls, method):
+ def handler(url, **_):
+ options = cls.registry[method, url]
+ resp = options[cls.index[method, url]]
+ cls.index[method, url] = (cls.index[method, url] + 1) % len(options)
+ return resp
+
+ return handler
+
+
+requests.get = Mock.handle("get")
+requests.post = Mock.handle("post")
+requests.patch = Mock.handle("patch")
diff --git a/tests/subprocess_mock.py b/tests/subprocess_mock.py
@@ -0,0 +1,37 @@
+import random
+import subprocess
+
+
+def sha():
+ return hex(random.getrandbits(128))
+
+
+def check_output(cmd, **_):
+ text = ""
+ # repos.git
+ if "git diff" in cmd:
+ text = "some\nfiles\nthat\nwere\nchanged"
+ elif "git remote -v" in cmd and "grep https" in cmd:
+ text = "https://fake_remote.subprocessmock.com/fake_owner/fake_repo.git"
+ elif "git branch" in cmd and "grep" in cmd:
+ text = "subprocess_mock_fake_branch"
+ elif "rev-parse HEAD" in cmd:
+ text = sha()
+ elif "git log -1" in cmd:
+ text = "some_fake_git_commit_message\nfrom_subprocess_mock"
+ # jci
+ elif "cat /proc/self/cgroup" in cmd:
+ text = "fake_pipe_id_from_subprocess_mock"
+ # remotes.git
+ elif "git hash-object" in cmd:
+ text = sha()
+ elif "git mktree" in cmd:
+ text = sha()
+ elif "git commit-tree" in cmd:
+ text = sha()
+ elif "git update-ref" in cmd:
+ text = sha()
+ return text.encode()
+
+
+subprocess.check_output = check_output
diff --git a/tests/test_jaypore_ci.py b/tests/test_jaypore_ci.py
@@ -1,10 +1,8 @@
import pytest
-from jaypore_ci import __version__
-
-def test_version():
- assert __version__ == "0.1.0"
+def test_sanity():
+ assert 4 == 2 + 2
def test_simple_linear_jobs(pipeline):