commit 0db01dc8bb59a6d6a44ec775152e60036b9b47ec
parent f8affe4f0a3f0f61615d1e6236fdf973f853f915
Author: arjoonn <arjoonn@noreply.localhost>
Date: Thu, 2 Feb 2023 08:04:03 +0000
tui_reporter (!32)
Branch auto created by JayporeCI
```jayporeci
╔ 🟢 : JayporeCI [sha ba2ff95bc6]
┏━ Docker
┃
┃ 🟢 : Jci [495ddaf9] 1:22
┃ 🟢 : JciEnv [ba156eea] 1:13
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
┏━ Jobs
┃
┃ 🟢 : black [a91c7647] 0: 0
┃ 🟢 : pylint [999a0d05] 0:10
┃ 🟢 : pytest [b332e8c9] 0: 2
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
┏━ Publish
┃
┃ 🟢 : DockerHubJci [9256ba7e] 1:17
┃ 🟢 : DockerHubJcienv [0355d8fb] 0:51
┃ 🟢 : PublishDocs [d9ba508e] 0: 8
┃ 🟢 : PublishPypi [1126fbee] 0: 7
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
```
Co-authored-by: arjoonn sharma <arjoonn@midpathsoftware.com>
Reviewed-on: https://gitea.midpathsoftware.com/midpath/jaypore_ci/pulls/32
Diffstat:
6 files changed, 170 insertions(+), 46 deletions(-)
diff --git a/cicd/pre-push.sh b/cicd/pre-push.sh
@@ -36,7 +36,7 @@ hook() {
-e JAYPORE_CODE_DIR=$JAYPORE_CODE_DIR \
-v /var/run/docker.sock:/var/run/docker.sock \
-v $REPO_ROOT:/jaypore_ci/repo:ro \
- -v /tmp/jaypore_$SHA:/jaypore_ci/run \
+ -v /tmp/jayporeci__src__$SHA:/jaypore_ci/run \
--workdir /jaypore_ci/run \
arjoonn/jci:latest \
bash -c "bash /jaypore_ci/repo/$JAYPORE_CODE_DIR/pre-push.sh run"
diff --git a/docs/source/index.rst b/docs/source/index.rst
@@ -85,9 +85,9 @@ This would produce a CI report like::
- `1: 3` is the time taken by the job.
-To see your pipelines on your machine you can run:
+To see the pipelines on your machine you can run:
-.. code-blcok:: bash
+.. code-block:: bash
docker run \
--rm -it \
@@ -96,7 +96,43 @@ To see your pipelines on your machine you can run:
bash -c 'python3 -m jaypore_ci'
-This will open up a console where you can interact and explore the job logs.
+This will open up a console where you can explore the job logs. If you don't
+want to do this, it's also possible to simply use `docker logs <container ID>`
+to explore jobs.
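+
+For example, here is a sketch of doing this by hand. The `grep` pattern below
+relies on the `jayporeci__` container name prefix that the Docker executor
+uses:
+
+.. code-block:: bash
+
+   # List the CI containers, then inspect the logs of one job.
+   docker ps -a --format '{{.ID}} {{.Names}}' | grep jayporeci__
+   docker logs <container ID>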
+
+
+Config rules
+------------
+
+1. A config is simply a Python file that imports and uses **jaypore_ci**.
+2. A config starts with creating a :class:`jaypore_ci.jci.Pipeline` and using it as a context manager.
+   - A pipeline has to have one implementation each of a Remote, Reporter, and Executor specified.
+ - :class:`jaypore_ci.interfaces.Remote`
+ - :class:`jaypore_ci.interfaces.Reporter`
+ - :class:`jaypore_ci.interfaces.Executor`
+   - If you do not specify them, the defaults are:
+ - :class:`jaypore_ci.remotes.gitea.Gitea`
+ - :class:`jaypore_ci.reporters.text.Text`
+ - :class:`jaypore_ci.executors.docker.Docker`
+ - You can specify ANY other keyword arguments to the pipeline and they will
+ be applied to jobs in that pipeline as a default. This allows you to keep
+ your code DRY. For example, we can specify **image='some/docker:image'**
+ and this will be used for all jobs in the pipeline.
+3. Each pipeline can declare multiple :meth:`jaypore_ci.jci.Pipeline.stage` sections.
+   - Stage names have to be unique. They cannot conflict with job names either.
+ - Stages are executed in the order in which they are declared in the config.
+   - The default stage is called **Pipeline**.
+   - Any extra keyword arguments specified while creating the stage are
+     applied to jobs in that stage. These arguments override whatever is
+     specified at the Pipeline level.
+4. Finally, any number of :meth:`jaypore_ci.jci.Pipeline.job` calls can be declared.
+   - Jobs declared inside a stage belong to that stage.
+   - Job names have to be unique. They cannot clash with stage names or other job names.
+ - Jobs are run in parallel **UNLESS** they specify
+ **depends_on=["other_job"]**, in which case the job runs after
+ **other_job** has passed.
+   - Jobs inherit keyword arguments from the pipeline first, then the stage,
+     and finally whatever is specified at the job level (see the sketch below).
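+
+Putting these rules together, here is a minimal config sketch. The image,
+stage/job names, and commands are placeholders, and the exact call signatures
+are assumed from the rules above:
+
+.. code-block:: python
+
+   from jaypore_ci import jci
+
+   # Sketch only: the image and job commands are illustrative.
+   # image= set here becomes the default for every job in the pipeline.
+   with jci.Pipeline(image="python:3.11") as p:
+       with p.stage("Checks"):
+           p.job("lint", "python3 -m pylint my_package")
+           # Runs in the same stage, but only after 'lint' has passed.
+           p.job("test", "python3 -m pytest", depends_on=["lint"])
+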
Examples
diff --git a/jaypore_ci/executors/docker.py b/jaypore_ci/executors/docker.py
@@ -54,9 +54,7 @@ class Docker(Executor):
if self.pipe_id is not None:
self.delete_network()
self.delete_all_jobs()
- self.pipe_id = __check_output__(
- "cat /proc/self/cgroup | grep name= | awk -F/ '{print $3}'"
- )
+ self.pipe_id = pipeline.pipe_id
self.pipeline = pipeline
self.create_network()
@@ -136,9 +134,8 @@ class Docker(Executor):
Generates a clean job name slug.
"""
name = "".join(
- l
- for l in job.name.lower().replace(" ", "_")
- if l in "abcdefghijklmnopqrstuvwxyz_1234567890"
+ l if l in "abcdefghijklmnopqrstuvwxyz1234567890" else "-"
+ for l in job.name.lower()
)
return f"jayporeci__job__{self.pipe_id}__{name}"
@@ -152,7 +149,7 @@ class Docker(Executor):
trigger = [
"docker run -d",
"-v /var/run/docker.sock:/var/run/docker.sock",
- f"-v /tmp/jaypore_{job.pipeline.remote.sha}:/jaypore_ci/run",
+ f"-v /tmp/jayporeci__src__{self.pipeline.remote.sha}:/jaypore_ci/run",
*["--workdir /jaypore_ci/run" if not job.is_service else None],
f"--name {self.get_job_name(job)}",
f"--network {self.get_net()}",
diff --git a/jaypore_ci/jci.py b/jaypore_ci/jci.py
@@ -3,6 +3,7 @@ The code submodule for Jaypore CI.
"""
import time
import os
+import subprocess
from itertools import product
from collections import defaultdict
from typing import List, Union, Callable
@@ -27,10 +28,15 @@ PREFIX = "JAYPORE_"
class Job: # pylint: disable=too-many-instance-attributes
"""
- This is the fundamental building block.
- Each job goes through a lifecycle defined by `Status` class.
+ This is the fundamental building block for running jobs.
+ Each job goes through a lifecycle defined by
+ :class:`jaypore_ci.interfaces.Status`.
- A job is run by an Executor as part of a Pipeline.
+ A job is run by an :class:`jaypore_ci.interfaces.Executor` as part of a
+ :class:`jaypore_ci.jci.Pipeline`.
+
+ It is never created manually. The correct way to create a job is to use
+ :meth:`jaypore_ci.jci.Pipeline.job`.
"""
def __init__(
@@ -78,7 +84,7 @@ class Job: # pylint: disable=too-many-instance-attributes
run_id=self.run_id,
)
- def update_report(self):
+ def update_report(self) -> str:
"""
Update the status report. Usually called when a job changes some of
its internal state like when logs are updated or when status has
@@ -94,6 +100,8 @@ class Job: # pylint: disable=too-many-instance-attributes
Status.SKIPPED: "warning",
}[self.pipeline.get_status()]
report = self.pipeline.reporter.render(self.pipeline)
+ with open("/jaypore_ci/run/jaypore_ci.status.txt", "w", encoding="utf-8") as fl:
+ fl.write(report)
try:
self.pipeline.remote.publish(report, status)
except Exception as e: # pylint: disable=broad-except
@@ -154,7 +162,7 @@ class Job: # pylint: disable=too-many-instance-attributes
if with_update_report:
self.update_report()
- def is_complete(self):
+ def is_complete(self) -> bool:
"""
Is this job complete? It could have passed/failed etc.
We no longer need to check for updates in a complete job.
@@ -163,8 +171,12 @@ class Job: # pylint: disable=too-many-instance-attributes
def get_env(self):
"""
- Gets the environment variables for a given job by interpolating it with
- the pipeline's environment.
+ Gets the environment variables for a given job.
+ Order of precedence for setting values is:
+
+ 1. Pipeline
+ 2. Stage
+ 3. Job
"""
return {
**{
@@ -180,11 +192,6 @@ class Job: # pylint: disable=too-many-instance-attributes
class Pipeline: # pylint: disable=too-many-instance-attributes
"""
A pipeline acts as a controlling/organizing mechanism for multiple jobs.
-
- - Each pipeline has stages. A default stage of 'Pipeline' is always available.
- - Stages are executed in order. Execution proceeds to the next stage ONLY
- if all jobs in a stage have passed.
- - Jobs can be defined inside stages.
"""
def __init__( # pylint: disable=too-many-arguments
@@ -205,8 +212,17 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
self.reporter = reporter if reporter is not None else reporters.text.Text()
self.graph_direction = graph_direction
self.poll_interval = poll_interval
- self.executor.set_pipeline(self)
self.stages = ["Pipeline"]
+ self.pipe_id = (
+ subprocess.check_output(
+ "cat /proc/self/cgroup | grep name= | awk -F/ '{print $3}'",
+ shell=True,
+ stderr=subprocess.STDOUT,
+ )
+ .decode()
+ .strip()
+ )
+ self.executor.set_pipeline(self)
# ---
kwargs["image"] = kwargs.get("image", "arjoonn/jaypore_ci:latest")
kwargs["timeout"] = kwargs.get("timeout", 15 * 60)
@@ -239,7 +255,7 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
self.remote.__exit__(exc_type, exc_value, traceback)
return False
- def get_status(self):
+ def get_status(self) -> Status:
"""
Calculates a pipeline's status based on the status of its jobs.
"""
@@ -261,7 +277,7 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
return Status.FAILED
return Status.PENDING if has_pending else Status.PASSED
- def get_status_dot(self):
+ def get_status_dot(self) -> str:
"""
Gets the status dot for the pipeline.
"""
@@ -334,8 +350,9 @@ class Pipeline: # pylint: disable=too-many-instance-attributes
def run(self):
"""
- Run the pipeline. This is almost always called automatically when the
- context of the pipeline declaration finishes.
+        Run the pipeline. This is always called automatically when the context
+        of the pipeline declaration finishes, so unless you are doing
+        something fancy you don't need to call this manually.
"""
self.__ensure_duplex__()
# Run stages one by one
diff --git a/jaypore_ci/tui.py b/jaypore_ci/tui.py
@@ -1,4 +1,5 @@
import subprocess
+from copy import deepcopy
from rich.traceback import Traceback
@@ -13,9 +14,48 @@ HELP = """
- Toggle the list of jobs for that pipeline.
- Show pipeline logs
- Clicking on the job will show logs for that job
+
+ :J55555557.
+ :P#######?
+ :5#BBBBB#?
+ :5#BBBBB#?
+ :5#BBBBB#?
+ .~^ :5#BBBBB#?
+ ^JG#G! :5#BBBBB#?
+ :JB##B#B?. :5#BBBBB#?
+ ~G##BBBB##J: :5#BBBBB#?
+ ~G#BBBBB#G?^. :5#BBBBB#?
+ :P#BBBBB#P^ :5#BBBBB#?
+ !B#BBBB#G~ :P#######?
+ 7#BBBBB#P: .~!!!!!!!^
+ !B#BBBB#G~
+ :P#BBBBB#P~
+ ~G#BBBBB#BJ^.
+ ~P##BBBB##BPY?!!!!?YJ.
+ :JB###BBB###########5:
+ ^?PB####BBBBBBBBBB#G!
+ .~?5GB##########BGY^
+ .^~!7????7!~^.
"""
+def get_logs(cid):
+ return subprocess.check_output(
+ f"docker logs {cid}",
+ shell=True,
+ stderr=subprocess.STDOUT,
+ ).decode()
+
+
+def get_status(sha):
+ with open(
+ f"/tmp/jayporeci__src__{sha}/jaypore_ci.status.txt", "r", encoding="utf-8"
+ ) as fl:
+ status = fl.read()
+ status = status.replace("```jayporeci", "").replace("```", "")
+ return status
+
+
def get_pipes_from_docker_ps():
lines = (
subprocess.check_output(
@@ -29,6 +69,13 @@ def get_pipes_from_docker_ps():
pipes = {}
PREFIX = "jayporeci__"
+ DEFAULT_PIPE = {
+ "sha": None,
+ "jobs": [],
+ "cid": None,
+ "completed": True,
+ "kind": "pipe",
+ }
for line in lines:
if PREFIX not in line:
continue
@@ -36,18 +83,26 @@ def get_pipes_from_docker_ps():
cid = line.split(" ")[0]
if kind == "pipe":
if cid not in pipes:
- pipes[cid] = {"sha": None, "jobs": [], "cid": None}
+ pipes[cid] = deepcopy(DEFAULT_PIPE)
if pipes[cid]["sha"] is None:
- pipes[cid]["sha"] = details[0][:8]
+ pipes[cid]["sha"] = details[0]
if pipes[cid]["cid"] is None:
pipes[cid]["cid"] = cid
+ pipes[cid]["completed"] = "Exited (" in line
elif kind == "job":
pipe_cid, name = details
pipe_cid = pipe_cid[:12]
if pipe_cid not in pipes:
- pipes[pipe_cid] = {"sha": None, "jobs": [], "cid": None}
- pipes[pipe_cid]["jobs"].append((cid[:12], name))
- return pipes
+ pipes[pipe_cid] = deepcopy(DEFAULT_PIPE)
+ pipes[pipe_cid]["jobs"].append(
+ {
+ "cid": cid[:12],
+ "name": name,
+ "completed": "Exited (" in line,
+ "kind": "job",
+ }
+ )
+ return {cid: pipe for cid, pipe in pipes.items() if pipe["sha"] is not None}
class Console(App):
@@ -66,10 +121,23 @@ class Console(App):
tree.root.expand()
pipes = get_pipes_from_docker_ps()
for pipe in pipes.values():
- pipe_node = tree.root.add(pipe["sha"], data=pipe)
+ s = " " if pipe["completed"] else "*"
+ pipe_node = tree.root.add(
+ f"{s} {pipe['sha'][:8]}", data=pipe, expand=not pipe["completed"]
+ )
+ pipe_node.add_leaf(
+ f"{s} {pipe['cid'][:4]}: JayporeCI",
+ data={
+ "cid": pipe["cid"],
+ "sha": pipe["sha"],
+ "name": "Status",
+ "kind": "status",
+ "completed": pipe["completed"],
+ },
+ )
for job in pipe["jobs"]:
- job_cid, job_name = job
- pipe_node.add_leaf(f"{job_cid[:4]}: {job_name}", data=job)
+ s = " " if job["completed"] else "*"
+ pipe_node.add_leaf(f"{s} {job['cid'][:4]}: {job['name']}", data=job)
# ---
yield Container(
tree,
@@ -80,6 +148,8 @@ class Console(App):
def on_mount(self, event: events.Mount) -> None: # pylint: disable=unused-argument
self.query_one(Tree).show_root = False
self.query_one(Tree).focus()
+ code_view = self.query_one("#code", Static)
+ code_view.update(HELP)
def on_tree_node_selected(self, event: Tree.NodeSelected) -> None:
"""Called when the user click a node in the job tree."""
@@ -87,21 +157,25 @@ class Console(App):
code_view = self.query_one("#code", Static)
data = event.node.data
cid = None
- if isinstance(data, dict) and "cid" in data:
+ if isinstance(data, dict) and "cid" in data and data.get("kind") == "pipe":
cid = name = data["cid"]
name = f"Pipeline for SHA: {data['sha']}"
- elif isinstance(data, tuple):
- cid, name = data
- name = f"Job: {name}"
+ elif isinstance(data, dict) and "sha" in data and data.get("kind") == "status":
+ name = f"Status: {data['sha']}"
+ try:
+ code_view.update(get_status(data["sha"]))
+ except Exception: # pylint: disable=broad-except
+ code_view.update(Traceback(theme="github-dark", width=None))
+ self.sub_title = "ERROR"
+ return
+ elif isinstance(data, dict) and "cid" in data and data.get("kind") == "job":
+ cid = name = data["cid"]
+ name = f"Job: {data['name']}"
if cid is None:
code_view.update(HELP)
return
try:
- logs = subprocess.check_output(
- f"docker logs {cid}",
- shell=True,
- stderr=subprocess.STDOUT,
- ).decode()
+ logs = get_logs(cid)
except Exception: # pylint: disable=broad-except
code_view.update(Traceback(theme="github-dark", width=None))
self.sub_title = "ERROR"
diff --git a/pyproject.toml b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "jaypore_ci"
-version = "0.2.2"
+version = "0.2.3"
description = ""
authors = ["arjoonn sharma <arjoonn.94@gmail.com>"]