Compare commits
30 Commits
31 changed files with 1867 additions and 617 deletions
@ -0,0 +1,11 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.231.6/containers/ubuntu/.devcontainer/base.Dockerfile

# [Choice] Ubuntu version (use hirsute or bionic on local arm64/Apple Silicon): hirsute, focal, bionic
ARG VARIANT="hirsute"
FROM mcr.microsoft.com/vscode/devcontainers/base:0-${VARIANT}

# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
#     && apt-get -y install --no-install-recommends <your-package-list-here>
@ -0,0 +1,32 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.231.6/containers/ubuntu
{
    "name": "Ubuntu",
    "build": {
        "dockerfile": "Dockerfile",
        // Update 'VARIANT' to pick an Ubuntu version: hirsute, focal, bionic
        // Use hirsute or bionic on local arm64/Apple Silicon.
        "args": { "VARIANT": "focal" }
    },

    // Set *default* container specific settings.json values on container create.
    "settings": {},

    // Add the IDs of extensions you want installed when the container is created.
    "extensions": [],

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Use 'postCreateCommand' to run commands after the container is created.
    // "postCreateCommand": "uname -a",

    // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
    "remoteUser": "vscode",
    "features": {
        "docker-from-docker": "20.10",
        "git": "latest",
        "python": "3.10"
    }
}
@ -1,32 +1,41 @@
 kind: pipeline
 type: docker
 name: default

 steps:
 - name: static-test
   image: alpine/flake8
   commands:
-  - python3 -m flake8 --ignore E501,W503
+  - python3 -m flake8 --ignore E501,W503 alice-ci/src

 - name: build
   image: python
   commands:
   - python3 -m pip install build
   - python3 -m build alice-ci

+- name: test
+  image: python
+  environment:
+    PYPIUSER: USER
+    PYPIPASS: PASS
+  commands:
+  - python3 -m pip install alice-ci/dist/alice_ci*.whl
+  - alice -i ci-examples/full.yaml -vv
+
 - name: publish
   image: python
   environment:
     TWINE_PASSWORD:
       from_secret: pypi_password
     TWINE_USERNAME:
       from_secret: pypi_username
   commands:
   - python3 -m pip install twine
   - python3 -m twine upload --verbose alice-ci/dist/*
   when:
     branch:
     - master
     event:
       exclude:
       - pull_request
@ -1,140 +1,142 @@
 # ---> Python
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
 *$py.class

 # C extensions
 *.so

 # Distribution / packaging
 .Python
 build/
 develop-eggs/
 dist/
 downloads/
 eggs/
 .eggs/
 lib/
 lib64/
 parts/
 sdist/
 var/
 wheels/
 share/python-wheels/
 *.egg-info/
 .installed.cfg
 *.egg
 MANIFEST

 # PyInstaller
 # Usually these files are written by a python script from a template
 # before PyInstaller builds the exe, so as to inject date/other infos into it.
 *.manifest
 *.spec

 # Installer logs
 pip-log.txt
 pip-delete-this-directory.txt

 # Unit test / coverage reports
 htmlcov/
 .tox/
 .nox/
 .coverage
 .coverage.*
 .cache
 nosetests.xml
 coverage.xml
 *.cover
 *.py,cover
 .hypothesis/
 .pytest_cache/
 cover/

 # Translations
 *.mo
 *.pot

 # Django stuff:
 *.log
 local_settings.py
 db.sqlite3
 db.sqlite3-journal

 # Flask stuff:
 instance/
 .webassets-cache

 # Scrapy stuff:
 .scrapy

 # Sphinx documentation
 docs/_build/

 # PyBuilder
 .pybuilder/
 target/

 # Jupyter Notebook
 .ipynb_checkpoints

 # IPython
 profile_default/
 ipython_config.py

 # pyenv
 # For a library or package, you might want to ignore these files since the code is
 # intended to run in multiple environments; otherwise, check them in:
 # .python-version

 # pipenv
 # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
 # However, in case of collaboration, if having platform-specific dependencies or dependencies
 # having no cross-platform support, pipenv may install dependencies that don't work, or not
 # install all needed dependencies.
 #Pipfile.lock

 # PEP 582; used by e.g. github.com/David-OConnor/pyflow
 __pypackages__/

 # Celery stuff
 celerybeat-schedule
 celerybeat.pid

 # SageMath parsed files
 *.sage.py

 # Environments
 .env
 .venv
 env/
 venv/
 ENV/
 env.bak/
 venv.bak/

 # Spyder project settings
 .spyderproject
 .spyproject

 # Rope project settings
 .ropeproject

 # mkdocs documentation
 /site

 # mypy
 .mypy_cache/
 .dmypy.json
 dmypy.json

 # Pyre type checker
 .pyre/

 # pytype static type analyzer
 .pytype/

 # Cython debug symbols
 cython_debug/

+# persistency dir
+.alice
@ -1,9 +1,9 @@
MIT License

Copyright (c) 2022 Daniel Gyulai

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -1,7 +1,12 @@
 # alice

 CI framework with support for local running.

+Main repo [here](https://git.gyulai.cloud/gyulaid/alice).
+
+[](https://ci.gyulai.cloud/gyulaid/alice)
+[](https://badge.fury.io/py/alice-ci)
+
 * [Basic usage](alice-ci/README.md)
 * [Runners](docs/runners.md)
 * [CI syntax](docs/syntax.md)
@ -1,16 +1,16 @@
# Alice-CI

Continuous Integration framework with the goal of using the exact same code in CI and in the local environment. Steps are defined in YAML files; for the syntax, see the docs. Runs on Linux and Windows; Mac should work too, but is not yet tested.

## Usage

Install with pip:
```
python3 -m pip install alice-ci
```

To run:

```
python3 -m alice [-i <ci.yaml>] STEPS
```
@ -1,6 +1,6 @@
[build-system]
requires = [
    "setuptools>=42",
    "wheel"
]
build-backend = "setuptools.build_meta"
@ -1,26 +1,29 @@
 [metadata]
 name = alice-ci
-version = 0.0.2
+version = 0.0.14
 author = Daniel Gyulai
 description = Alice CI framework
 long_description = file: README.md
 long_description_content_type = text/markdown
 url = https://git.gyulai.cloud/gyulaid/alice
 project_urls =
     Bug Tracker = https://git.gyulai.cloud/gyulaid/alice/issues
 classifiers =
     Programming Language :: Python :: 3
     License :: OSI Approved :: MIT License
     Operating System :: OS Independent

 [options]
 package_dir =
     = src
-packages = find:
+packages =
+    alice
+    alice.runners
 python_requires = >=3.6
 install_requires =
-    PyYAML==6.0
-    virtualenv==20.14.0
-
-[options.packages.find]
-where = src
+    PyYAML
+    docker
+
+[options.entry_points]
+console_scripts =
+    alice = alice.cli:main
@ -1,5 +1,9 @@
 # flake8: noqa F401
-from .cli import App
-from .jobparser import Job, JobParser
-from .exceptions import NonZeroRetcode
-from .pythonrunner import PythonRunner
+from .configparser import ConfigParser
+from .runnerfactory import Factory
+from .exceptions import NonZeroRetcode
+from .exceptions import RunnerError
+from .exceptions import ConfigException
+
+name = "alice"
@ -1,3 +1,4 @@
-from cli import main
+from .cli import main

-main()
+if __name__ == '__main__':
+    main()
@ -1,59 +1,62 @@
+import logging
 import os
 import argparse

-from utils import ConfigParser
-from runnerfactory import Factory
-from exceptions import ConfigException, NonZeroRetcode, RunnerError
+from .configparser import ConfigParser
+from .exceptions import ConfigException, NonZeroRetcode, RunnerError


-def gen_env(self, param_list):
+def gen_env(param_list):
     env_vars = {}
     for item in param_list:
         item_parts = item.split("=")
         if len(item_parts) == 2:
             env_vars[item_parts[0]] = item_parts[1]
         else:
             raise ConfigException(f"Invalid parameter: {item}")
     return env_vars


 def parse_jobs(args):
     try:
-        factory = Factory()
         if len(args.env) > 0:
-            factory.update_runners({"env": gen_env(args.env)})
-        jobParser = ConfigParser(args.input, factory)
+            envs = gen_env(args.env)
+            logging.debug(f"[Alice] Env vars from CLI: {envs}")
+        jobParser = ConfigParser(args.input, gen_env(args.env))

-        print("Begin pipeline steps...")
         for step in args.steps:
-            if step in jobParser.jobs:
-                jobParser.execute_job(step)
-                print(f"[Step] {step}: SUCCESS")
-            else:
-                print(f"Step {step} not found in {args.input}")
-                exit(1)
+            jobParser.execute(step)
     except ConfigException as e:
         print(f"Configuration error-> {e}")
         exit(1)
     except NonZeroRetcode:
-        print("FAILED")
+        print("[Alice] FAILED")
         exit(1)
     except RunnerError as e:
         print(f"RunnerError-> {e}")
+        exit(1)


 def main():
     parser = argparse.ArgumentParser(prog="alice")
-    parser.add_argument("steps", nargs='+')
-    parser.add_argument("-i", "--input", default="alice-ci.yaml")
+    parser.add_argument("steps", nargs='*', default=["default"])
+    parser.add_argument("-i", "--input", default=".alice-ci.yaml")
     parser.add_argument("-e", "--env", nargs='*', default=[])
     parser.add_argument("-a", "--addrunner", nargs='*', default=[])
+    parser.add_argument('--verbose', '-v', action='count', default=0)
     args = parser.parse_args()
+
+    loglevel = 30 - ((10 * args.verbose) if args.verbose > 0 else 0)
+    logging.basicConfig(level=loglevel, format='%(message)s')
+
     if not os.path.isfile(args.input):
         print(f"No such file: {args.input}")
         exit(1)
+    persistency_path = os.path.join(os.getcwd(), ".alice")
+    if not os.path.isdir(persistency_path):
+        os.mkdir(persistency_path)
     parse_jobs(args)


 if __name__ == "__main__":
     main()
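As a quick sanity check of the verbosity arithmetic above (illustrative snippet, not part of the diff): each `-v` subtracts 10 from the default WARNING level, mapping onto the stdlib logging constants.

```
import logging

for verbose in (0, 1, 2):
    loglevel = 30 - ((10 * verbose) if verbose > 0 else 0)
    print(verbose, logging.getLevelName(loglevel))
# 0 WARNING
# 1 INFO
# 2 DEBUG
```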
@ -0,0 +1,51 @@
import logging
import os

from .exceptions import ConfigException


class ConfigHolder:
    __instance = None
    file_name = os.path.join(os.getcwd(), ".alice", "vars")

    @staticmethod
    def getInstance():
        """ Static access method. """
        if ConfigHolder.__instance is None:
            ConfigHolder()
        return ConfigHolder.__instance

    def __init__(self):
        """ Virtually private constructor. """
        if ConfigHolder.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            ConfigHolder.__instance = self
            config = os.path.abspath(os.path.join(os.getcwd(), self.file_name))
            self.vars = {}
            if os.path.isfile(config):
                with open(config) as f:
                    for _line in f:
                        line = _line.strip()
                        items = line.split("=")
                        if len(items) > 1:
                            self.vars[items[0]] = line.replace(f"{items[0]}=", "")
            logging.debug(f"Loaded from {self.file_name}: {self.vars}")

    def get(self, key):
        try:
            return self.vars[key]
        except KeyError:
            raise ConfigException(f"{key} not defined in .conf!")

    def set(self, key, value):
        self.vars[key] = value
        self.commit()

    def soft_set(self, key, value):
        self.vars[key] = value

    def commit(self):
        with open(self.file_name, 'w') as f:
            for k, v in self.vars.items():
                f.write(f"{k}={v if v is not None else ''}\n")
@ -0,0 +1,101 @@
import logging
from os import getcwd, path, environ
import subprocess
import yaml

from .exceptions import ConfigException
from .runnerfactory import Factory


class ConfigParser:
    def __init__(self, file_path, cli_env_vars) -> None:
        with open(file_path) as f:
            self.config = yaml.safe_load(f)
        self.factory = Factory(self.__gen_globals(cli_env_vars), self.config.get("runners", {}))
        self.jobs = self.__get_jobs()
        self.pipelines = self.config.get("pipelines", {})

    # Initialize env and workdir if not present in global
    def __gen_globals(self, cli_vars):
        env_vars = environ.copy()
        env_vars.update(cli_vars)
        globals = {
            "env": env_vars,
            "workdir": getcwd()
        }
        if "runners" in self.config:
            if "global" in self.config["runners"]:
                if "env" in self.config["runners"]["global"]:
                    for var in self.config["runners"]["global"]["env"]:
                        globals["env"][var["name"]] = var["value"]
                if "workdir" in self.config["runners"]["global"]:
                    globals["workdir"] = self.config["runners"]["global"]["workdir"]

        logging.debug(f"[Alice] Configured globals: {globals}")
        return globals

    def __get_jobs(self):
        if "jobs" in self.config:
            jobs = {}
            for job_spec in self.config["jobs"]:
                name = job_spec["name"]
                if name in jobs:
                    raise ConfigException(f"Job with name {name} already exists!")

                jobs[name] = job_spec
            logging.info(f"[Alice] Parsed jobs: {', '.join(jobs.keys())}")
            return jobs
        else:
            raise ConfigException("No jobs defined in config")

    def __is_changed(self, changes):
        try:
            target = changes["branch"]
            paths = []
            for _path in changes["paths"]:
                paths.append(path.abspath(_path))
            # TODO: Error handling
            command = ["git", "diff", "--name-only", target]
            with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
                p.wait()
                for line in p.stdout:
                    change_path = path.abspath(line.decode("UTF-8").strip())
                    for _path in paths:
                        spec_path = path.abspath(_path)
                        if change_path.startswith(spec_path):
                            logging.info(f"[Alice] Modified file: {change_path}")
                            logging.info(f"[Alice] Path match: {_path}")
                            return True
        except KeyError:
            raise ConfigException(f"Invalid 'changes' config: {changes}")
        return False

    def execute(self, task_name):
        if task_name in self.jobs:
            self.execute_job(task_name)
        elif task_name in self.pipelines:
            self.execute_pipeline(task_name)
        else:
            raise ConfigException(f"No such job or pipeline: {task_name}")

    def execute_pipeline(self, pipeline_name):
        if pipeline_name in self.pipelines:
            print(f"[Alice][Pipeline] {pipeline_name}: Start")
            for task in self.pipelines[pipeline_name]:
                self.execute(task)
            print(f"[Alice][Pipeline] {pipeline_name}: Success")

    def execute_job(self, job_name):
        if job_name in self.jobs:
            print(f"[Alice][Job] {job_name}: Start")
            job_spec = self.jobs[job_name]
            should_run = True
            if "changes" in job_spec:
                should_run = self.__is_changed(job_spec["changes"])
            if should_run:
                runner = self.factory.get_runner(job_spec["type"])
                runner.run(job_spec)
                status = "SUCCESS"
            else:
                status = "SKIP, no change detected"
            print(f"[Alice][Job] {job_name}: {status}")
@ -1,10 +1,10 @@
class NonZeroRetcode(Exception):
    pass


class RunnerError(Exception):
    pass


class ConfigException(Exception):
    pass
@ -1,34 +1,57 @@
-from runners.pythonrunner import PythonRunner
-from os import getcwd
+import logging
+from os.path import join, abspath
+
+from .runners.pythonrunner import PythonRunner
+from .runners.pypirunner import PyPiRunner
+from .runners.dockerrunner import DockerRunner
+from .runners.pypirepo import PypiRepoRunner
+from .exceptions import ConfigException


 class Factory():
-    def __init__(self) -> None:
-        self.runnertypes = self.__load_runners()
-        self.runners = {}
-        self.workdir = getcwd()
-        self.globals = {}
+    def __init__(self, globals, runner_configs) -> None:
+        self.globals = globals
+        self.runner_configs = {}
+        self.runnertypes = {}
+        self.runners = {}
+        self.__load_runners()
+        self.__gen_runner_configs(runner_configs)

     def __load_runners(self):
         # TODO: Runners can be imported via cli too
+        # https://git.gyulai.cloud/gyulaid/alice/issues/4
         # module = __import__("module_file")
         # my_class = getattr(module, "class_name")
-        return {"python": PythonRunner}
-
-    def set_globals(self, globals):
-        self.globals = globals
-
-    def update_globals(self, update):
-        if "env" in update:
-            self.globals["env"].update(update["env"])
-
-    def update_runners(self, config):
+        self.runnertypes = {"python": PythonRunner,
+                            "pypi": PyPiRunner,
+                            "docker": DockerRunner,
+                            "pypirepo": PypiRepoRunner}
+
+        logging.info(f"[Alice] Available runners: {'|'.join(self.runnertypes.keys())}")
+
+    def __gen_runner_configs(self, config):
         for runnertype, runnerconfig in config.items():
             if runnertype != "global":
-                self.get_runner(runnertype).update_config(runnerconfig)
+                logging.info(f"[Alice] Global config found for runner {runnertype}")
+                config = self.globals.copy()
+                for key, value in runnerconfig.items():
+                    if key == "env":
+                        for env_var in value:
+                            config["env"][env_var["name"]] = env_var["value"]
+                    elif key == "workdir":
+                        config["workdir"] = abspath(join(config["workdir"], value))
+                    else:
+                        config[key] = value
+                self.runner_configs[runnertype] = config
+                logging.debug(f"[Alice] Globals for {runnertype}: {runnerconfig}")

     def get_runner(self, runnertype):
         if runnertype not in self.runners:
-            self.runners[runnertype] = self.runnertypes[runnertype](self.workdir, self.globals)
+            if runnertype in self.runnertypes:
+                logging.info(f"[Alice] Initializing runner: {runnertype}")
+                # If there is a runner specific config, use that, else global
+                config = self.runner_configs.get(runnertype, self.globals.copy())
+                self.runners[runnertype] = self.runnertypes[runnertype](config)
+            else:
+                raise ConfigException(f"Invalid runner type: {runnertype}")
         return self.runners[runnertype]
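A sketch of how the factory is driven (illustrative only; the constructor arguments mirror how ConfigParser calls it above, the module path and values are assumed):

```
from alice.runnerfactory import Factory  # assumed module path

globals = {"env": {}, "workdir": "."}
factory = Factory(globals, {"python": {"workdir": "alice-ci"}})
runner = factory.get_runner("python")  # built lazily on first request, then cached
```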
@ -0,0 +1,4 @@
# flake8: noqa F401
from .pythonrunner import PythonRunner
from .pypirunner import PyPiRunner
from .dockerrunner import DockerRunner
@ -1 +1,241 @@
-# TODO Implement

from enum import Enum
import json
import logging
from os import path, getcwd
import docker

from .pyutils import grab_from, gen_dict
from ..exceptions import ConfigException, NonZeroRetcode, RunnerError


class ImageSource(Enum):
    NONE = 1
    BUILD = 2
    PULL = 3


def get_user(config, default):
    if "credentials" in config:
        if "username" in config["credentials"]:
            data = config["credentials"]["username"]
            if isinstance(data, str):
                return data
            else:
                return grab_from(data)
    return default


def get_pass(config, default):
    if "credentials" in config:
        if "password" in config["credentials"]:
            data = config["credentials"]["password"]
            if isinstance(data, str):
                return data
            else:
                return grab_from(data)
    return default


def get_provider(config, default, default_type):
    if "image" in config:
        build = False
        pull = False
        candidate_type = default_type
        if "build" in config["image"]:
            build = True
            if default_type == ImageSource.BUILD:
                candidate = default.copy(config["image"]["build"])
            else:
                candidate = Builder(config["image"]["build"])
                candidate_type = ImageSource.BUILD
        elif "pull" in config["image"]:
            pull = True
            if default_type == ImageSource.PULL:
                candidate = default.copy(config["image"]["pull"])
            else:
                candidate = Puller(config["image"]["pull"])
                candidate_type = ImageSource.PULL

        if build and pull:
            raise ConfigException("[DockerRunner] Can't build and pull the same image!")

        return candidate, candidate_type
    return default, default_type


class Tagger:
    def __init__(self, config={}) -> None:
        self.name = config.get("name", None)
        self.username = get_user(config, None)
        self.password = get_pass(config, None)
        self.publish = config.get("publish", False)

    def copy(self, job_config):
        t = Tagger()
        t.name = job_config.get("name", self.name)
        t.username = get_user(job_config, self.username)
        t.password = get_pass(job_config, self.password)
        t.publish = job_config.get("publish", self.publish)
        return t

    def __str__(self) -> str:
        data = {
            "name": self.name,
            "publish": self.publish,
            "credentials": {
                "username": self.username,
                "password": self.password
            }
        }
        return f"{data}"

    def handle(self, client, image):
        if self.name is not None:
            if self.name not in image.tags and f"{self.name}:latest" not in image.tags:
                print(f"[DockerRunner] Tagging {image.tags[0]} as {self.name}")
                image.tag(self.name)
            if self.publish:
                print(f"[DockerRunner] Pushing {self.name}")
                client.images.push(self.name)


class Builder():
    def __init__(self, config) -> None:
        self.dir = path.abspath(config.get("dir", getcwd()))
        self.dockerfile = config.get("dockerfile", None)
        self.name = config.get("name", None)
        self.args = gen_dict(config.get("args", []))

    def copy(self, job_config):
        b = Builder({})
        b.dir = path.abspath(path.join(self.dir, job_config.get("dir", ".")))
        b.dockerfile = job_config.get("dockerfile", self.dockerfile)
        b.name = job_config.get("name", self.name)
        b.args = self.args.copy()
        b.args.update(gen_dict(job_config.get("args", [])))
        return b

    def __str__(self) -> str:
        data = {
            "type": "builder",
            "dir": self.dir,
            "dockerfile": self.dockerfile,
            "name": self.name,
            "args": self.args
        }
        return json.dumps(data)

    def prepare(self, client):
        print(f"[DockerRunner] Building image {self.name}")
        if self.dockerfile is None:
            self.dockerfile = "Dockerfile"
        try:
            image, log = client.images.build(path=self.dir,
                                             dockerfile=self.dockerfile,
                                             tag=self.name,
                                             buildargs=self.args,
                                             labels={"builder": "alice-ci"})
            for i in log:
                logging.debug(i)
            return image
        except docker.errors.BuildError as e:
            raise RunnerError(f"[DockerRunner] Build failed: {e}")
        except docker.errors.APIError as e:
            raise RunnerError(f"[DockerRunner] Error: {e}")


class Puller():
    def __init__(self, config={}) -> None:
        self.name = config.get("name", None)
        self.username = get_user(config, None)
        self.password = get_pass(config, None)

    def copy(self, job_config={}):
        p = Puller()
        p.name = job_config.get("name", self.name)
        p.username = get_user(job_config, self.username)
        p.password = get_pass(job_config, self.password)
        return p

    def __str__(self) -> str:
        data = {
            "name": self.name,
            "credentials": {
                "username": self.username,
                "password": self.password
            }
        }
        return f"{data}"

    def prepare(self, client):
        print(f"[DockerRunner] Pulling image {self.name}")
        return client.images.pull(self.name)


class DockerConfig:
    def __init__(self, config={}) -> None:
        self.username = get_user(config, None)
        self.password = get_pass(config, None)
        self.image_provider, self.provider_type = get_provider(config, None, ImageSource.NONE)
        self.tagger = Tagger(config.get("tag", {}))
        self.commands = config.get("commands", [])
        self.env = config.get("env", {})

    def copy(self, job_config={}):
        d = DockerConfig()
        d.username = get_user(job_config, self.username)
        d.password = get_pass(job_config, self.password)
        d.image_provider, d.provider_type = get_provider(job_config, self.image_provider, self.provider_type)
        d.tagger = self.tagger.copy(job_config.get("tag", {}))
        d.commands = self.commands.copy() + job_config.get("commands", [])
        d.env = self.env.copy()
        d.env.update(gen_dict(job_config.get("env", [])))
        return d

    def __str__(self) -> str:
        data = {
            "credentials": {
                "username": self.username,
                "password": self.password
            },
            "image": self.image_provider.__str__(),
            "commands": self.commands,
            "tag": self.tagger.__str__()
        }
        return f"{data}"


class DockerRunner():
    def __init__(self, config) -> None:
        logging.info("[DockerRunner] Initializing")
        self.config = DockerConfig(config)
        self.client = docker.from_env()

    def run(self, job_spec):
        job_config = self.config.copy(job_spec)
        logging.debug(f"[DockerRunner] Job config: {job_config.__str__()}")
        if job_config.image_provider is None:
            raise RunnerError("[DockerRunner] No image provider configured!")
        image = job_config.image_provider.prepare(self.client)
        logging.info(f"[DockerRunner] Image: {image.tags} ({image.id})")

        if len(job_config.commands) > 0:
            if "PATH" in job_config.env:
                del job_config.env["PATH"]
            # Keep a throwaway container alive so each command can be exec'd in turn
            container = self.client.containers.run(image=image.id,
                                                   entrypoint=["sleep", "infinity"],
                                                   detach=True,
                                                   auto_remove=True)
            try:
                for i in job_config.commands:
                    command = ["/bin/sh", "-c", i]
                    logging.debug(f"[DockerRunner] Command array: {command}")
                    code, output = container.exec_run(cmd=command,
                                                      environment=job_config.env)
                    for line in output.decode("UTF-8").splitlines():
                        print(f"[{job_spec['name']}] {line}")
                    if code != 0:
                        raise NonZeroRetcode(f"Command {i} returned code {code}")
            finally:
                if container is not None:
                    container.stop()

        job_config.tagger.handle(self.client, image)
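An illustrative job spec for the docker runner; the keys mirror what `DockerConfig.copy()` reads above. The `pyutils` helpers (`grab_from`, `gen_dict`) are not part of this diff, so the name/value shape of `env` is inferred from the other runners, and all concrete values are invented.

```
job_spec = {
    "name": "build-image",
    "type": "docker",
    "image": {
        "build": {"dir": ".", "dockerfile": "Dockerfile", "name": "alice.localhost/demo"}
    },
    "env": [{"name": "GREETING", "value": "hello"}],
    "commands": ["echo $GREETING"],
    "tag": {"name": "alice.localhost/demo:latest", "publish": False},
}
```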
@ -0,0 +1,168 @@
import logging
import subprocess
import docker
from os.path import join, isdir
from os import getcwd, mkdir
import os
import requests
import platform
import time

from ..exceptions import RunnerError
from ..config import ConfigHolder


pipconf = """[global]
index-url = URL
trusted-host = BASE
               pypi.org
extra-index-url= http://pypi.org/simple"""


class RepoConfig:
    def __init__(self, config={}) -> None:
        self.port = config.get("port", 8888)
        self.enabled = config.get("enabled", True)
        self.container_name = config.get("container_name", "alice-pypiserver")

    def copy(self, job_config):
        r = RepoConfig()
        r.container_name = job_config.get("container_name", self.container_name)
        r.enabled = job_config.get("enabled", self.enabled)
        r.port = job_config.get("port", self.port)
        return r


class PypiRepoRunner:
    def __init__(self, config) -> None:
        logging.info("[PyPiRepo] Initializing")
        self.config = RepoConfig(config)
        self.client = docker.from_env()
        self.user = "alice"
        self.passwd = "alice"
        self.htpasswd = 'alice:{SHA}UisnajVr3zkBPfq+os1D4UHsyeg='

    def get_image(self):
        # TODO: remove when resolved:
        # Official Docker image support for ARM?
        # https://github.com/pypiserver/pypiserver/issues/364
        pypiserver = "https://github.com/pypiserver/pypiserver.git"
        if platform.machine() == "aarch64":
            tag = "alice.localhost/pypiserver:arm"
            try:
                self.client.images.get(tag)
                return tag
            except docker.errors.ImageNotFound:
                print("[PyPiRepo] Building PyPiServer ARM image, this could take a while")
                workdir = join(getcwd(), ".alice", "pypirepo", "source")
                if not os.path.isdir(workdir):
                    os.mkdir(workdir)
                git_command = ["git", "clone", pypiserver, "--branch=v1.3.2"]
                output = []
                with subprocess.Popen(git_command, cwd=workdir, stdout=subprocess.PIPE) as p:
                    for line in p.stdout:
                        output.append(line.decode('utf8').strip())
                    p.wait()
                    if p.returncode != 0:
                        print("\n".join(output))
                        raise RunnerError("[PyPiRepo] Could not fetch pypiserver source")
                source_path = os.path.join(workdir, "pypiserver")
                self.client.images.build(path=source_path, tag=tag)
                return tag
        else:
            return "pypiserver/pypiserver:latest"

    def run(self, job_spec):
        job_config = self.config.copy(job_spec)
        docker_host_ip = None
        for network in self.client.networks.list():
            if network.name == "bridge":
                try:
                    docker_host_ip = network.attrs["IPAM"]["Config"][0]["Gateway"]
                except KeyError:
                    docker_host_ip = network.attrs["IPAM"]["Config"][0]["Subnet"].replace(".0/16", ".1")
        if docker_host_ip is None:
            raise RunnerError("Unable to determine Docker host IP")

        if job_config.enabled:
            try:
                c = self.client.containers.get(job_config.container_name)
                print(f"[PyPiRepo] {job_config.container_name} already running")
            except docker.errors.NotFound:
                persistency_dir = join(getcwd(), ".alice", "pypirepo")
                if not isdir(persistency_dir):
                    mkdir(persistency_dir)

                package_dir = join(persistency_dir, "packages")
                if not isdir(package_dir):
                    mkdir(package_dir)

                htpasswd_file = join(persistency_dir, ".htpasswd")
                with open(htpasswd_file, 'w') as f:
                    f.write(self.htpasswd)

                c = self.client.containers.run(
                    name=job_config.container_name,
                    image=self.get_image(),
                    detach=True,
                    labels={"app": "alice"},
                    command=["--overwrite", "-P", ".htpasswd", "packages"],
                    ports={"8080/tcp": job_config.port},
                    volumes={
                        package_dir: {
                            "bind": "/data/packages",
                            "mode": "rw"
                        },
                        htpasswd_file: {
                            "bind": "/data/.htpasswd",
                            "mode": "ro"
                        }
                    },
                    restart_policy={
                        "Name": "unless-stopped"
                    }
                )
                print(f"[PyPiRepo] Started {job_config.container_name}")

            c.reload()
            logging.info(f"[PyPiRepo] {job_config.container_name} : {c.status}")
            if c.status != "running":
                raise RunnerError(f"[PyPiRepo] Repo container unstable: {c.status}")

            uri = f"http://localhost:{job_config.port}"
            unreachable = True
            attempts = 0
            while unreachable and attempts < 5:
                attempts += 1
                try:
                    requests.get(uri)
                    unreachable = False
                except Exception as e:
                    logging.info(f"[PyPiRepo] {attempts} - Repo at {uri} is unavailable: {e}")
                    time.sleep(2)
            if unreachable:
                raise RunnerError("[PyPiRepo] Repo unreachable")

            cfgh = ConfigHolder.getInstance()
            cfgh.soft_set("PYPI_USER", self.user)
            cfgh.soft_set("PYPI_PASS", self.passwd)
            cfgh.soft_set("PYPI_REPO", uri)
            cfgh.soft_set("DOCKER_PYPI_USER", self.user)
            cfgh.soft_set("DOCKER_PYPI_PASS", self.passwd)
            cfgh.soft_set("DOCKER_PYPI_REPO", f"http://{docker_host_ip}:{job_config.port}")
            cfgh.commit()

            venv = join(os.getcwd(), "venv")
            if os.path.isdir(venv):
                netloc = f"localhost:{job_config.port}"
                url = f"http://{self.user}:{self.passwd}@{netloc}"
                conf = pipconf.replace("URL", url).replace("BASE", netloc)

                if os.name == "nt":  # Windows
                    filename = join(venv, "pip.ini")
                else:  # Linux & Mac
                    filename = join(venv, "pip.conf")
                with open(filename, 'w') as f:
                    f.write(conf)
                print(f"[PyPiRepo] pip conf written to {filename}")
@ -0,0 +1,345 @@
import json
import logging
import os
import re
import subprocess
import sys
from pkg_resources import parse_version
from requests import get
from requests.auth import HTTPBasicAuth
from os import environ, path
from html.parser import HTMLParser
from alice.runners.pyutils import PackageManager, glob, grab_from
from alice.exceptions import ConfigException, RunnerError
import hashlib
from pathlib import Path


def md5_update_from_file(filename, hash):
    assert Path(filename).is_file()
    with open(str(filename), "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash.update(chunk)
    return hash


def md5_file(filename):
    return md5_update_from_file(filename, hashlib.md5()).hexdigest()


def md5_update_from_dir(directory, hash, exclude_dirs, exclude_extensions, exclude_dirs_wildcard):
    assert Path(directory).is_dir()
    for _path in os.listdir(directory):
        path = os.path.join(directory, _path)
        if os.path.isfile(path):
            hash.update(_path.encode())
            logging.debug(f"[PyPiRunner][Hash] File: {path}")
            hash = md5_update_from_file(path, hash)
        elif os.path.isdir(path):
            skip = False
            for name in exclude_dirs:
                if name in os.path.basename(_path):
                    skip = True
            if not skip:
                hash = md5_update_from_dir(path, hash, exclude_dirs, exclude_extensions, exclude_dirs_wildcard)
    return hash


def md5_dir(directory, exclude_dirs=[], exclude_extensions=[], exclude_dirs_wildcard=[]):
    # Note: despite the md5_* naming, the directory walker is fed a SHA-1 hash here
    return md5_update_from_dir(directory, hashlib.sha1(), exclude_dirs, exclude_extensions, exclude_dirs_wildcard).hexdigest()


def get_uri(config, default):
    url = config.get("repo", {}).get("uri", default)
    if url is not None:
        if not isinstance(url, str):
            url = grab_from(url)
        if not re.match('(?:http|ftp|https)://', url):
            url = f"https://{url}"
    return url


def get_user(config, default):
    if "repo" in config:
        if "username" in config["repo"]:
            data = config["repo"]["username"]
            if isinstance(data, str):
                return data
            else:
                return grab_from(data)
    return default


def get_pass(config, default):
    if "repo" in config:
        if "password" in config["repo"]:
            data = config["repo"]["password"]
            if isinstance(data, str):
                return data
            else:
                return grab_from(data)
    return default


class SimpleRepoParser(HTMLParser):
    def __init__(self):
        HTMLParser.__init__(self)
        self.packages = []

    def handle_data(self, data):
        re_groups = re.findall(r"(\d*\.\d*\.\d*)", data)
        if len(re_groups) == 1:
            file_version = re_groups[0]
            if file_version not in self.packages:
                self.packages.append(file_version)


# Parses and stores the config from yaml
class PypiConfig:
    def __init__(self, config={}) -> None:
        self.workdir = path.abspath(config.get("workdir", "."))
        self.repo_uri = get_uri(config, None)
        self.repo_user = get_user(config, None)
        self.repo_pass = get_pass(config, None)
        self.packages = set(config.get("packages", []))
        self.upload = config.get("upload", False)
        self.fail_if_exists = config.get("fail_if_exists", False)

    # returns a PyPiConfig with merged values
    def copy(self, job_config={}):
        p = PypiConfig()
        p.workdir = path.abspath(path.join(self.workdir, job_config.get("workdir", ".")))
        p.repo_uri = get_uri(job_config, self.repo_uri)
        p.repo_user = get_user(job_config, self.repo_user)
        p.repo_pass = get_pass(job_config, self.repo_pass)
        job_pkg_set = set(job_config.get("packages", []))
        job_pkg_set.update(self.packages)
        p.packages = job_pkg_set
        p.upload = job_config.get("upload", self.upload)
        p.fail_if_exists = job_config.get("fail_if_exists", self.fail_if_exists)
        return p


# TODO: Refactor to something sensible, more flexible
class PackageMeta:
    def __init__(self):
        self.conf_dir = path.join(os.getcwd(), ".alice", "pypirunner")
        self.metafile = path.join(self.conf_dir, "packagemeta.json")
        if not path.isdir(self.conf_dir):
            os.mkdir(self.conf_dir)
        if path.isfile(self.metafile):
            with open(self.metafile) as f:
                self.metadata = json.load(f)
        else:
            self.metadata = {}
            self.__save()

    def __save(self):
        with open(self.metafile, 'w') as f:
            json.dump(self.metadata, f)

    def get(self, package, key):
        return self.metadata.get(package, {}).get(key, "")

    def set(self, package, key, value):
        if package not in self.metadata:
            self.metadata[package] = {}
        self.metadata[package][key] = value
        self.__save()


# TODO: consider "--skip-existing" flag for twine
class PyPiRunner():
    def __init__(self, config) -> None:
        logging.info("[PyPiRunner] Initializing")
        self.workdir = config["workdir"]
        self.config = PypiConfig(config)
        self.metadata = PackageMeta()

    def __versions(self, config, pkg_name):
        repo = config.repo_uri
        if repo is None:
            repo = "https://pypi.python.org/pypi"

        if config.repo_pass is not None and config.repo_user is not None:
            logging.info("[PyPiRunner][Versions] Set auth headers from config")
            logging.debug(f"[PyPiRunner][Versions] Auth: {config.repo_user}:{config.repo_pass}")
            auth = HTTPBasicAuth(config.repo_user, config.repo_pass)
        else:
            logging.info("[PyPiRunner][Versions] No auth headers in config, skip")
            logging.debug(f"[PyPiRunner][Versions] Auth: {config.repo_user}:{config.repo_pass}")
            auth = None

        try:
            if repo.endswith("pypi"):
                url = f'{repo}/{pkg_name}/json'
                logging.info(f"[PyPiRunner][Versions] Trying JSON API at {url}")
                response = get(url, auth=auth)
                if response.status_code == 200:
                    releases = json.loads(response.text)["releases"]
                    return sorted(releases, key=parse_version, reverse=True)
                else:
                    logging.info(f"[PyPiRunner][Versions] JSON failed: [{response.status_code}]")
                    logging.debug(response.text)
                    repo = f"{repo}/simple"
            url = f"{repo}/{pkg_name}"
            logging.info(f"[PyPiRunner][Versions] Trying Simple API at {url}")
            response = get(url, auth=auth)
            if response.status_code == 200:
                parser = SimpleRepoParser()
                parser.feed(response.text)
                return sorted(parser.packages, key=parse_version, reverse=True)
            if response.status_code == 404:
                return []
            else:
                logging.info(f"[PyPiRunner][Versions] Simple failed: [{response.status_code}]")
                logging.debug(response.text)
                raise Exception("Failed to fetch available versions")

        except Exception as e:
            raise RunnerError(f"{url}: {e}")

    def build(self, config, package):
        print(f"[PyPiRunner] Building {package}")
        pkg_path = path.join(config.workdir, package)
        if not path.isdir(pkg_path):
            raise ConfigException(f"Path does not exists: {pkg_path}")
        PackageManager.getInstance().ensure("build")
        command = [sys.executable, "-m", "build", package]
        if logging.root.isEnabledFor(logging.DEBUG):
            with subprocess.Popen(command, cwd=config.workdir) as p:
                p.wait()
                if p.returncode != 0:
                    raise RunnerError(f"[PyPiRunner] Failed to build {package}")
        else:
            with subprocess.Popen(command, cwd=config.workdir, stdout=subprocess.PIPE) as p:
                p.wait()
                if p.returncode != 0:
                    raise RunnerError(f"[PyPiRunner] Failed to build {package}")
        print(f"[PyPiRunner] Package {package} built")

    def find_unuploaded(self, config, file_list, pkg_name):
        versions = self.__versions(config, pkg_name)
        unuploaded = []
        for file in file_list:
            re_groups = re.findall(r"(\d*\.\d*\.\d*)", file)
            if len(re_groups) < 1:
                raise RunnerError(f"Unable to determine version of file {file}")
            file_version = re_groups[0]
            if file_version not in versions:
                unuploaded.append(file)
            else:
                print(f"[PyPiRunner] File already uploaded: {os.path.basename(file)}")
        print(f"[PyPiRunner] Packages to publish: {', '.join(unuploaded) if len(unuploaded) > 0 else 'None'}")
        return unuploaded

    def upload_command(self, config, package, _command, to_upload):
        unregistered = False
        command = _command + to_upload
        with subprocess.Popen(command, cwd=config.workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
            p.wait()
            if p.returncode != 0:
                for line in map(lambda x: x.decode('utf8').strip(), p.stderr):
                    if "405 Method Not Allowed" in line:
                        unregistered = True
                if not unregistered:
                    print("STDOUT:")
                    sys.stdout.buffer.write(p.stdout.read())
                    print("STDERR:")
                    sys.stdout.buffer.write(p.stderr.read())
                    raise RunnerError(f"[PyPiRunner] Failed to upload {package} ({p.returncode})")
        if unregistered:
            print("[PyPiRunner] Registering package")
            register_command = [sys.executable, "-m", "twine", "register", "--verbose", "--non-interactive"]
            if config.repo_uri is not None:
                register_command.append("--repository-url")
                register_command.append(config.repo_uri)
            if config.repo_user is not None and config.repo_pass is not None:
                register_command.append("-u")
                register_command.append(config.repo_user)
                register_command.append("-p")
                register_command.append(config.repo_pass)
            register_command.append(to_upload[0])
            with subprocess.Popen(register_command, cwd=config.workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
                p.wait()
                if p.returncode != 0:
                    print("STDOUT:")
                    sys.stdout.buffer.write(p.stdout.read())
                    print("STDERR:")
                    sys.stdout.buffer.write(p.stderr.read())
                    raise RunnerError(f"[PyPiRunner] Failed to register {package} ({p.returncode})")
            self.upload_command(config, package, _command, to_upload)

    def upload(self, config, package, current_version):
        print(f"[PyPiRunner] Uploading {package}")
        PackageManager.getInstance().ensure("twine")
        command = [sys.executable, "-m", "twine", "upload", "--verbose", "--non-interactive"]
        if config.repo_uri is not None:
            command.append("--repository-url")
            command.append(config.repo_uri)
        if config.repo_user is not None and config.repo_pass is not None:
            command.append("-u")
            command.append(config.repo_user)
            command.append("-p")
            command.append(config.repo_pass)
        else:
            raise RunnerError("[PyPiRunner] Can't upload without credentials!")

        dist_path = os.path.abspath(os.path.join(config.workdir, package, "dist"))
        _files = glob(os.path.join(dist_path, "*"), config.workdir)
        files = []
        for file in _files:
            if current_version in os.path.basename(file):
                files.append(file)
                print(f"[PyPiRunner] Found: {file}")
            else:
                logging.info(f"[PyPiRunner] Dropped: {file} doesn't match current version: {current_version}")

        to_upload = self.find_unuploaded(config, files, package)
        if len(to_upload) == 0:
            return
        self.upload_command(config, package, command, to_upload)
        print(f"[PyPiRunner] Uploaded {package}")

    def package_version(self, config, package):
        cfg_path = path.join(config.workdir, package, "setup.cfg")
        with open(cfg_path) as f:
            for line in f:
                if line.startswith("version"):
                    re_groups = re.findall(r"(\d*\.\d*\.\d*)", line)
                    if len(re_groups) < 1:
                        raise RunnerError(f"Unable to determine version of package: |{line}|")
                    return re_groups[0]

    def run(self, job_spec):
        job_config = self.config.copy(job_spec)

        for package in job_config.packages:
            pkg_dir = path.join(job_config.workdir, package)
            pkg_hash = md5_dir(pkg_dir, exclude_dirs=["pycache", "pytest_cache", "build", "dist", "egg-info"])
            logging.debug(f"[PyPiRunner] {package} hash: {pkg_hash}")
            pkg_version = self.package_version(job_config, package)
            logging.debug(f"[PyPiRunner] {package} local version: {pkg_version}")
            repo_versions = self.__versions(job_config, package)
            logging.debug(f"[PyPiRunner] {package} remote version: {repo_versions}")

            if pkg_version not in repo_versions:
                print(f"[PyPiRunner] {package} not found in repo")
                self.build(job_config, package)
                self.metadata.set(package, pkg_version, pkg_hash)
            else:
                if pkg_hash != self.metadata.get(package, pkg_version):
                    self.build(job_config, package)
                    self.metadata.set(package, pkg_version, pkg_hash)
                else:
                    print(f"[PyPiRunner] {package} Unchanged since last build")

            if job_config.upload:
                self.upload(job_config, package, pkg_version)
            else:
                print("[PyPiRunner] Upload disabled, skipping")
@ -1,96 +1,102 @@ |
import logging
import subprocess
import os
import sys
import shlex

from ..exceptions import NonZeroRetcode, RunnerError, ConfigException
from .pyutils import PackageManager, glob_command, grab_from


# TODO: Handle config like PyPiConfig
class PythonRunner:
    def __init__(self, config) -> None:
        logging.info("[PythonRunner] Initializing")
        self.workdir = config["workdir"]
        self.virtual_dir = os.path.abspath(os.path.join(self.workdir, "venv"))
        self.config = config
        PackageManager.getInstance().ensure("virtualenv")
        self.__init_venv()

    # TODO: Detect if the prev venv is the same OS type
    def __init_venv(self):
        if os.name == "nt":  # Windows
            self.vpython = os.path.join(self.virtual_dir, "Scripts", "python.exe")
        else:  # Linux & Mac
            self.vpython = os.path.join(self.virtual_dir, "bin", "python")

        if not os.path.exists(self.vpython):
            logging.debug(f"[PythonRunner] Venv not found at {self.vpython}")
            logging.info("[PythonRunner] Initializing venv")
            output = []
            with subprocess.Popen([sys.executable, "-m", "virtualenv", self.virtual_dir],
                                  stdout=subprocess.PIPE) as p:
                # Drain stdout before waiting so a full pipe buffer can't block the child
                for line in p.stdout:
                    output.append(line.decode('utf8').strip())
                p.wait()
                if p.returncode != 0:
                    print("\n".join(output))
                    raise RunnerError("[PythonRunner] Could not create virtualenv")
                else:
                    logging.info(f"[PythonRunner] Virtualenv initialized at {self.virtual_dir}")
        else:
            logging.info(f"[PythonRunner] Found virtualenv at {self.virtual_dir}")

        dependencies = self.config.get("dependencies", [])
        if len(dependencies) > 0:
            logging.info(f"[PythonRunner] Ensuring dependencies: {', '.join(dependencies)}")
            command = [self.vpython, "-m", "pip", "install"] + dependencies
            if logging.root.isEnabledFor(logging.DEBUG):
                with subprocess.Popen(command) as p:
                    p.wait()
                    if p.returncode != 0:
                        raise RunnerError(f"[PythonRunner] Could not install dependencies: {dependencies} ({p.returncode})")
            else:
                output = []
                with subprocess.Popen(command, stdout=subprocess.PIPE) as p:
                    for line in p.stdout:
                        output.append(line.decode('utf8').strip())
                    p.wait()
                    if p.returncode != 0:
                        print("\n".join(output))
                        raise RunnerError(f"[PythonRunner] Could not install dependencies: {dependencies} ({p.returncode})")
            logging.info("[PythonRunner] Installation done")

    # Executes the given job in the one and only venv
    # parameter is the raw job_spec
    def run(self, job_spec):
        if "workdir" in job_spec:
            pwd = os.path.abspath(os.path.join(self.workdir, job_spec["workdir"]))
        else:
            pwd = self.workdir
        run_env = {}
        for k, v in self.config["env"].items():
            if isinstance(v, str):
                run_env[k] = v
            else:
                run_env[k] = grab_from(v)
        if "env" in job_spec:
            for env_var in job_spec["env"]:
                if isinstance(env_var["value"], str):
                    run_env[env_var["name"]] = env_var["value"]
                else:
                    run_env[env_var["name"]] = grab_from(env_var["value"])
        if "commands" in job_spec:
            commands = job_spec["commands"]
            for command in commands:
                logging.debug(f"[PythonRunner] Raw command: {command}")
                # TODO: only split if command is not an array
                if "*" in command:
                    run_command = glob_command(shlex.split(command), pwd)
                else:
                    run_command = shlex.split(command)
                logging.info(f"[PythonRunner] Command to execute: {run_command}")
                logging.debug(f"[PythonRunner] Workdir: {pwd}")
                if os.path.isdir(pwd):
                    with subprocess.Popen([self.vpython] + run_command, cwd=pwd, env=run_env) as p:
                        p.wait()
                        if p.returncode != 0:
                            raise NonZeroRetcode(f"Command {command} returned code {p.returncode}")
                else:
                    raise RunnerError(f"[PythonRunner] Invalid path for shell command: {pwd}")
        else:
            raise ConfigException(f"[PythonRunner] No commands specified in step {job_spec['name']}")
@ -0,0 +1,154 @@
import logging
import os
import subprocess
import sys
from pkg_resources import parse_version
import re

from ..exceptions import RunnerError, ConfigException
from ..config import ConfigHolder


class PackageManager:
    __instance = None

    @staticmethod
    def getInstance():
        """ Static access method. """
        if PackageManager.__instance is None:
            PackageManager()
        return PackageManager.__instance

    def __init__(self):
        """ Virtually private constructor. """
        if PackageManager.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            PackageManager.__instance = self
            self.package_list = self.__get_packages()

    def __get_packages(self):
        packages = {}
        with subprocess.Popen([sys.executable, "-m", "pip", "freeze"],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
            # Read all output before waiting so the pipe buffer can't fill up
            installed = list(map(lambda x: x.decode("UTF-8").split("=="), filter(lambda x: b'==' in x, p.stdout.read().splitlines())))
            p.wait()
            for name, version in installed:
                packages[name] = parse_version(version)
        logging.debug(f"[PackageManager] Picked up packages: {packages}")
        return packages

    def ensure_more(self, package_list, executable=sys.executable):
        to_install = list(filter(lambda x: not self.__has_package(x), package_list))
        if len(to_install) > 0:
            command = [executable, "-m", "pip", "install"] + to_install
            with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
                p.wait()
                if p.returncode != 0:
                    sys.stdout.buffer.write(p.stderr.read())
                    raise RunnerError(f"[PackageManager] Could not install dependencies ({p.returncode})")
            self.package_list = self.__get_packages()

    # Assumption: there are more hits in the long run than misses
    def ensure(self, package_string, executable=sys.executable):
        if not self.__has_package(package_string):
            logging.info(f"[PackageManager] Installing {package_string}")
            command = [executable, "-m", "pip", "install", package_string]
            with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
                p.wait()
                if p.returncode != 0:
                    sys.stdout.buffer.write(p.stderr.read())
                    raise RunnerError(f"[PackageManager] Could not install dependencies ({p.returncode})")
            self.package_list = self.__get_packages()
        else:
            logging.info(f"[PackageManager] {package_string} already installed")

    def __has_package(self, package_string):
        # Longer operators first, otherwise ">=" would be split on ">"
        package_data = re.split("==|>=|<=|>|<", package_string)
        # check in cache
        if package_data[0] in self.package_list:
            # check if version is needed
            if len(package_data) == 2:
                required_version = parse_version(package_data[1])
                installed_version = self.package_list[package_data[0]]
                comparator = package_string.replace(package_data[0], "").replace(package_data[1], "")
                if comparator == "==":
                    return required_version == installed_version
                elif comparator == ">":
                    return installed_version > required_version
                elif comparator == ">=":
                    return installed_version >= required_version
                elif comparator == "<":
                    return installed_version < required_version
                elif comparator == "<=":
                    return installed_version <= required_version
                else:
                    raise ConfigException(f"Illegal comparator found: {comparator}")
            else:
                return True
        return False


def glob(item, workdir):
    new_command = []
    if "*" in item:
        logging.debug(f"[Globbing] Found item: [{item}]")
        dir = os.path.abspath(os.path.join(workdir, os.path.dirname(item)))
        base_name = os.path.basename(item)
        if os.path.isdir(dir):
            item_parts = base_name.split("*")
            for file in os.listdir(dir):
                # TODO: Fix ordering! A*B = B*A = AB*
                if item_parts[0] in file and item_parts[1] in file:
                    new_item = os.path.join(dir, file)
                    logging.debug(f"[Globbing] Substitute: {new_item}")
                    new_command.append(new_item)
        else:
            raise ConfigException(f"[Globbing] Dir does not exist: {dir}")
        return new_command
    else:
        return [item]


def glob_command(command, workdir):
    logging.debug(f"[Globbing] Starting command: {' '.join(command)}")
    new_command = []
    for item in command:
        new_command += glob(item, workdir)
    return new_command


def grab_from(target):
    if "from_env" in target:
        try:
            return os.environ[target["from_env"]]
        except KeyError:
            raise ConfigException(f"Env var unset: {target['from_env']}")
    elif "from_cfg" in target:
        value = ConfigHolder.getInstance().get(target["from_cfg"])
        if len(value) == 0:
            value = None
        return value
    else:
        raise ConfigException(f"Unsupported grabber: {target}")


def gen_dict(list_of_dicts):
    """
    Generates a dictionary from a list of dictionaries composed of
    'name' and 'value' keys.

    [{'name': 'a', 'value': 'b'}] => {'a': 'b'}
    """
    return_dict = {}

    for _dict in list_of_dicts:
        try:
            if isinstance(_dict["value"], str):
                return_dict[_dict["name"]] = _dict["value"]
            else:
                return_dict[_dict["name"]] = grab_from(_dict["value"])
        except KeyError:
            raise ConfigException(f"Invalid dict item: {_dict}")

    return return_dict
@ -1,78 +0,0 @@
import yaml
from runners.pythonrunner import PythonRunner
from exceptions import NonZeroRetcode, ConfigException


class DummyRunner():
    def __init__(self, type) -> None:
        self.type = type

    def run(self, command, workdir=None, env=None):
        raise Exception(f"Invalid runner type in config: {self.type}")


class Job():
    def __init__(self, type, repoDir, vpython, workspace, env={}) -> None:
        self.runner = self.__get_runner(type, repoDir, vpython)
        self.commands = []
        self.workspace = workspace
        self.env = env

    def __get_runner(self, type, repoDir, vpython):
        if type == "python":
            return PythonRunner(repoDir, vpython)
        else:
            return DummyRunner(type)

    def run_commands(self, _env={}):
        try:
            if self.env is None:
                env = _env.copy()
            else:
                env = self.env.copy()
                env.update(_env)
            for command in self.commands:
                self.runner.run(command, self.workspace, env)
        except NonZeroRetcode as n:
            print(n)
            exit(1)


class ConfigParser:
    def __init__(self, file_path, factory) -> None:
        with open(file_path) as f:
            self.config = yaml.safe_load(f)
        self.factory = factory
        if "runners" in self.config:
            if "global" in self.config["runners"]:
                self.factory.set_globals(self.__gen_globals())
            self.factory.update_runners(self.config["runners"])
        self.jobs = self.__get_jobs()

    # Initialize env, workdir if not present
    def __gen_globals(self):
        globals = self.config["runners"]["global"]
        if "env" not in globals:
            globals["env"] = []
        if "workdir" not in globals:
            globals["workdir"] = None
        return globals

    def __get_jobs(self):
        if "jobs" in self.config:
            jobs = {}
            for job_spec in self.config["jobs"]:
                name = job_spec["name"]
                if name in jobs:
                    raise ConfigException(f"Job with name {name} already exists!")
                jobs[name] = job_spec
            return jobs
        else:
            raise ConfigException("No jobs defined in config")

    def execute_job(self, job_name):
        if job_name in self.jobs:
            # Pass the job_spec to a runner
            runner = self.factory.get_runner(self.jobs[job_name]["type"])
            runner.run(self.jobs[job_name])
@ -1,29 +1,98 @@
runners:
  global:
    env:
      - name: A
        value: A
      - name: B
        value: B
      - name: C
        value: C
    workdir: .
  python:
    env:
      - name: A
        value: D
    dependencies:
      - flake8
      - build
  docker:
    credentials:
      username: D
      password: D

jobs:
  - name: env
    type: python
    changes:
      branch: origin/master
      paths:
        - "docs"
        - "alice-ci"
    env:
      - name: B
        value: E
    commands:
      - "-c \"from os import environ; assert environ['A'] == 'D'; assert environ['B'] == 'E'; assert environ['C'] == 'C'; print('Assertions passed')\""
  - name: lint
    type: python
    workdir: alice-ci/src
    commands:
      - "-m flake8 --ignore E501"
  - name: pkg
    type: pypi
    workdir: .
    upload: false
    fail_if_exists: false  # TODO: currently unused
    repo:
      uri: example.com
      username:
        from_env: PYPIUSER
      password:
        from_env: PYPIPASS
    packages:
      - alice-ci
  - name: "image"
    type: docker
    credentials:
      username: A
      #password: B
    image:
      build:
        dir: ci-examples/images/hello
        #dockerfile: ci-examples/images/hello/Dockerfile
        dockerfile: Dockerfile
        name: "sssss"
        args:
          - name: CIPASS
            value: NONE
      #pull:
      #  name: python:latest
      #  credentials:
      #    username: PASS
      #    password: WORD
    env:
      - name: VAR
        value: CHAR
    commands:
      - which python3
      - /usr/bin/python3 --version
      - date
      - env
    tag:
      publish: false
      name: repo.example.com/test/na
      credentials:
        username: B
        password: B
  - name: pypi_init
    type: pypirepo
    enabled: true
    port: 8888
    container_name: pypiserver

pipelines:
  default:
    - lint
    - env
    - pkg
@ -0,0 +1,9 @@
FROM ubuntu:latest

RUN apt update && apt install -y python3

ADD hello.py /opt/hello.py

#ENTRYPOINT [ "/bin/sh", "-c" ]

#CMD ["/usr/local/python/bin/python3", "/opt/hello.py"]
@ -0,0 +1,2 @@
if __name__ == "__main__":
    print("Hi Mom!")
@ -0,0 +1,18 @@
runners:
  python:
    dependencies:
      - flake8
      - build
      - twine
jobs:
  - name: selfcheck
    type: python
    workdir: ci
    commands:
      - "-m flake8 --ignore E501 --exclude venv"

  - name: lint
    type: python
    workdir: alice-ci/src
    commands:
      - "-m flake8 --ignore E501"
@ -1,24 +1,24 @@
# alice-ci.yaml examples

## Python lint

Installs the flake8 package in a virtual environment, then lints the contents of the packages directory in the current working dir.

```
runners:
  python:
    dependencies:
      - name: flake8
jobs:
  - name: lint
    type: python
    workdir: packages
    commands:
      - "-m flake8"
```

To run this job:

```
python3 -m alice lint
```
@ -1,8 +1,50 @@
# Runners

Runners are responsible for executing a list of commands in a set environment, defined in the CI yaml file.

## List of runners

* Python - executes python commands in a virtual environment
* Docker - executes each job in a separate Docker container - unimplemented

## Import schema

What you need to do to make Alice recognise and import your custom Runners

TODO

## Runner API

Each runner has to support the following functions:

### __init__(params, config)

* params: dict of runtime variables for the program itself.
* config: Runner config data, applies to all jobs

#### Params

Currently the only param used in the dict is "verbose", which is a boolean. The intended purpose is to show debug output if set to True.

#### config

Dict. Has two fixed keys, `env` and `workdir`. Env is the environment variables of the host, expanded by CLI parameters, then by global config values from the yaml; a key defined in the yaml overwrites the value copied from the host. Workdir is similar: it can be assigned globally at the CI yaml level, but runners may overwrite it.

Order of precedence:

1. By default: os.cwd()
2. if overwritten in global config

Below this level it is the runner's responsibility:

3. if overwritten in runner config
4. if overwritten in job

The runner shall receive the current working directory, unless stated otherwise in the global config.

The expected behaviour of "overwriting" the workdir is appending the new directory name to the existing path. For example:

* cwd = /root
* global defines workdir as "projects"
* runner config defines workdir as "python"
* job defines workdir as "alice"

In the case above, the actual working directory of the running job shall be `/root/projects/python/alice`.
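
As a rough sketch of that layering (not Alice's actual implementation; `effective_workdir` is a hypothetical helper introduced only for this example), each level simply appends its directory if it defined one:

```
import os

# Hypothetical helper: compose the effective workdir from the override chain.
# Each argument is None when that level did not define a workdir.
def effective_workdir(cwd, global_wd=None, runner_wd=None, job_wd=None):
    path = cwd
    for override in (global_wd, runner_wd, job_wd):
        if override is not None:
            path = os.path.join(path, override)
    return os.path.abspath(path)

# The example above: /root -> projects -> python -> alice
print(effective_workdir("/root", "projects", "python", "alice"))
# /root/projects/python/alice
```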

### run(job_spec)

This function executes one job attributed with the type of the runner called. As the only hard requirement from Alice's side is the "type" field in a job (plus the optional "changes"), everything else is handled by the runner.
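
For illustration, a minimal custom runner satisfying this API could look like the sketch below; `ShellRunner` and its behaviour are assumptions made for the example, not an existing Alice runner:

```
import subprocess


class ShellRunner:
    """Hypothetical runner: executes each job command with the host shell."""

    def __init__(self, params, config):
        self.verbose = params.get("verbose", False)
        self.workdir = config["workdir"]
        self.env = config["env"]

    def run(self, job_spec):
        # Only "type" (and the optional "changes") is interpreted by Alice itself;
        # every other field of the job spec is this runner's business.
        for command in job_spec.get("commands", []):
            if self.verbose:
                print(f"[ShellRunner] Running: {command}")
            subprocess.run(command, shell=True, cwd=self.workdir,
                           env=self.env, check=True)
```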
@ -0,0 +1,26 @@
# Schema

```
name: ""
type: docker
credentials: - global ...ish
  username
  password
image: - to use, pull, run
  build:
    dir:
    dockerfile:
    name: - defaults to step name
    args:
      - name:
        value:
  pull: - pulls, current working image - mutually exclusive with build
    name:
    credentials: - optional
command: - overwrite, not append
  - ...
tag:
  publish: true
  name: - published name with repo and everything
  credentials:
```
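
For a concrete instance of this schema, compare the `image` job in the full example config under `ci-examples/full.yaml` (the values below are placeholders):

```
- name: "image"
  type: docker
  credentials:
    username: A
  image:
    build:
      dir: ci-examples/images/hello
      dockerfile: Dockerfile
      name: "hello"
      args:
        - name: CIPASS
          value: NONE
  commands:
    - /usr/bin/python3 --version
  tag:
    publish: false
    name: repo.example.com/test/na
    credentials:
      username: B
      password: B
```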
@ -1,39 +1,39 @@
# alice-ci.yaml

This yaml file defines the job steps executed by Alice. Jobs are called by name, one for each parameter passed on the CLI. For example, the following command searches for a job called lint defined in the `alice-ci.yaml` file in the current working directory, then runs it.

```
python3 -m alice lint
```

[Example configs](examples.md)

## runners

Contains global configuration for various runners. Currently the only supported runner is `python`.

### Python

#### Dependencies

List of dependencies installed in the virtual environment. Each dependency has a `name` and an `import_name`: Alice checks the availability of each package by trying to import `import_name`, and if that fails, calls pip to install `name`.
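
For example, a dependency list might look like this (here `import_name` happens to match the package name, which holds for flake8 but not for every package):

```
runners:
  python:
    dependencies:
      - name: flake8
        import_name: flake8
```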

## jobs

List of jobs. Each job has a mandatory name, type, and a list of commands; workdir is an optional parameter.

### name

Mandatory value, string. Has to be unique in the current file.

### type

Job type, selects the runner executing the commands. Currently the only supported type is `python`.

### commands

List of strings, each executed one by one from top to bottom in the current context.

### workdir

Optional, defines the working directory relative to PWD. The default working directory is the current directory.
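
Putting the fields together, a complete job entry, taken from the lint job in this repo's example config, looks like:

```
jobs:
  - name: lint
    type: python
    workdir: alice-ci/src
    commands:
      - "-m flake8 --ignore E501"
```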