Fix #13

Co-authored-by: gyulaid <gyulaid@gyulai.cloud>
Reviewed-on: #18
Co-authored-by: Daniel Gyulai <gyulaid@gyulai.cloud>
Co-committed-by: Daniel Gyulai <gyulaid@gyulai.cloud>
(branch: pull/19/head)

10 changed files with 328 additions and 43 deletions
alice/runners/__init__.py
@@ -1,2 +1,3 @@
# flake8: noqa F401
from alice.runners.pythonrunner import PythonRunner
from alice.runners.pypirunner import PyPiRunner
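Assuming the first hunk is alice/runners/__init__.py (the path is inferred from the noqa F401 re-export pattern), the added line is what makes the new runner importable at package level:

# Hypothetical consumer import; only the export added in the hunk above is assumed.
from alice.runners import PyPiRunner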

alice/runners/pypirunner.py
@@ -0,0 +1,177 @@
import json
import os
import re
import subprocess
import sys
from urllib import request, error
from pkg_resources import parse_version
from os import environ, path

from alice.runners.pyutils import PackageManager, glob
from alice.exceptions import ConfigException, RunnerError


def grab_from(target):
    if "from_env" in target:
        return environ[target["from_env"]]
    else:
        raise ConfigException(f"Unsupported grabber: {target.keys()}")


def get_uri(config, default):
    url = config.get("repo", {}).get("uri", default)
    if url is not None:
        if not re.match('(?:http|ftp|https)://', url):
            url = f"https://{url}"
    return url


def get_user(config, default):
    if "repo" in config:
        if "username" in config["repo"]:
            data = config["repo"]["username"]
            if isinstance(data, str):
                return data
            else:
                return grab_from(data)
    return default


def get_pass(config, default):
    if "repo" in config:
        if "password" in config["repo"]:
            data = config["repo"]["password"]
            if isinstance(data, str):
                return data
            else:
                return grab_from(data)
    return default


# Parses and stores the config from yaml
class PypiConfig:
    def __init__(self, config={}) -> None:
        self.workdir = path.abspath(config.get("workdir", "."))
        self.repo_uri = get_uri(config, None)
        self.repo_user = get_user(config, None)
        self.repo_pass = get_pass(config, None)
        self.packages = set(config.get("packages", []))
        self.upload = config.get("upload", False)
        self.fail_if_exists = config.get("fail_if_exists", False)

    # returns a PypiConfig with merged values; job-level settings win over runner-level ones
    def copy(self, job_config={}):
        p = PypiConfig()
        p.workdir = path.abspath(path.join(self.workdir, job_config.get("workdir", ".")))
        p.repo_uri = get_uri(job_config, self.repo_uri)
        p.repo_user = get_user(job_config, self.repo_user)
        p.repo_pass = get_pass(job_config, self.repo_pass)
        job_pkg_set = set(job_config.get("packages", []))  # "packages" is optional here, as in __init__
        job_pkg_set.update(self.packages)
        p.packages = job_pkg_set
        p.upload = job_config.get("upload", self.upload)
        p.fail_if_exists = job_config.get("fail_if_exists", self.fail_if_exists)
        return p


# TODO: consider "--skip-existing" flag for twine
class PyPiRunner():
    def __init__(self, params, config) -> None:
        self.verbose = params["verbose"]
        if self.verbose:
            print("[PyPiRunner] Initializing")
        self.workdir = config["workdir"]
        self.config = PypiConfig(config)

    # Queries the package index JSON API for the versions of pkg_name that are already published
    def __versions(self, repo, pkg_name):
        if repo is not None:
            url = f"{repo}/{pkg_name}/json"
        else:
            url = f"https://pypi.python.org/pypi/{pkg_name}/json"
        try:
            releases = json.loads(request.urlopen(url).read())['releases']
        except error.URLError as e:
            raise RunnerError(f"{url}: {e}")

        return sorted(releases, key=parse_version, reverse=True)

    def build(self, config, package):
        # TODO: Actual build - silent, unless failure!
        pkg_path = path.join(config.workdir, package)
        if not path.isdir(pkg_path):
            raise ConfigException(f"Path does not exist: {pkg_path}")
        command = [sys.executable, "-m", "build", package]
        with subprocess.Popen(command, cwd=config.workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
            p.wait()
            if p.returncode != 0:
                print("STDOUT:")
                sys.stdout.buffer.write(p.stdout.read())
                print("STDERR:")
                sys.stdout.buffer.write(p.stderr.read())
                raise RunnerError(f"[PyPiRunner] Failed to build {package}")

    # Filters file_list down to the files whose version is not yet on the index
    def find_unuploaded(self, repo, file_list, pkg_name):
        versions = self.__versions(repo, pkg_name)
        unuploaded = []
        for file in file_list:
            re_groups = re.findall(r"(\d+\.\d+\.\d+)", file)
            if len(re_groups) < 1:
                raise RunnerError(f"Unable to determine version of file {file}")
            file_version = re_groups[0]
            if file_version not in versions:
                unuploaded.append(file)
            else:
                print(f"[PyPiRunner] File already uploaded: {os.path.basename(file)}")
        return unuploaded

    def upload(self, config, package):
        command = [sys.executable, "-m", "twine", "upload"]
        if self.verbose:
            command.append("--verbose")
        if config.repo_uri is not None:
            command.append("--repository-url")
            command.append(config.repo_uri)
        if config.repo_user is not None:
            command.append("-u")
            command.append(config.repo_user)
        if config.repo_pass is not None:
            command.append("-p")
            command.append(config.repo_pass)

        dist_path = os.path.abspath(os.path.join(config.workdir, package, "dist"))
        files = glob(os.path.join(dist_path, "*"), config.workdir)
        for file in files:
            print(f"[PyPiRunner] Found: {file}")

        # Only hand twine the files that are not on the index yet
        to_upload = self.find_unuploaded(config.repo_uri, files, package)
        if len(to_upload) == 0:
            return
        command += to_upload
        print(" ".join(command))
        with subprocess.Popen(command, cwd=config.workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
            p.wait()
            if p.returncode != 0:
                print("STDOUT:")
                sys.stdout.buffer.write(p.stdout.read())
                print("STDERR:")
                sys.stdout.buffer.write(p.stderr.read())
                raise RunnerError(f"[PyPiRunner] Failed to upload {package} ({p.returncode})")

    def run(self, job_spec):
        job_config = self.config.copy(job_spec)

        PackageManager.getInstance().ensure("build")
        for package in job_config.packages:
            print(f"[PyPiRunner] Building {package}")
            self.build(job_config, package)
            print(f"[PyPiRunner] Package {package} built")

        if job_config.upload:
            PackageManager.getInstance().ensure("twine")
            for package in job_config.packages:
                print(f"[PyPiRunner] Uploading {package}")
                self.upload(job_config, package)
        else:
            print("[PyPiRunner] Upload disabled, skipping")

alice/runners/pyutils.py
@@ -0,0 +1,115 @@
import os
import subprocess
import sys
from pkg_resources import parse_version
import re

from alice.exceptions import RunnerError, ConfigException


class PackageManager:
    __instance = None

    @staticmethod
    def getInstance():
        """ Static access method. """
        if PackageManager.__instance is None:
            PackageManager()
        return PackageManager.__instance

    def __init__(self):
        """ Virtually private constructor. """
        if PackageManager.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            PackageManager.__instance = self
            self.package_list = self.__get_packages()

    # Builds a {name: parsed version} cache from the output of `pip freeze`
    def __get_packages(self):
        packages = {}
        with subprocess.Popen([sys.executable, "-m", "pip", "freeze"],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
            p.wait()
            installed = list(map(lambda x: x.decode("UTF-8").split("=="),
                                 filter(lambda x: b'==' in x, p.stdout.read().splitlines())))
            for name, version in installed:
                packages[name] = parse_version(version)
        return packages

    def ensure_more(self, package_list, executable=sys.executable):
        to_install = list(filter(lambda x: not self.__has_package(x), package_list))
        if len(to_install) > 0:
            command = [executable, "-m", "pip", "install"] + to_install
            with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
                p.wait()
                if p.returncode != 0:
                    sys.stdout.buffer.write(p.stderr.read())
                    raise RunnerError(f"[PackageManager] Could not install dependencies ({p.returncode})")
            self.package_list = self.__get_packages()

    # Assumption: there are more hits in the long run than misses
    def ensure(self, package_string, executable=sys.executable):
        if not self.__has_package(package_string):
            command = [executable, "-m", "pip", "install", package_string]
            with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
                p.wait()
                if p.returncode != 0:
                    sys.stdout.buffer.write(p.stderr.read())
                    raise RunnerError(f"[PackageManager] Could not install dependencies ({p.returncode})")
            self.package_list = self.__get_packages()

    def __has_package(self, package_string):
        # Split "name<op>version"; longest operators first so ">=" is not consumed as ">"
        package_data = re.split(r"==|>=|<=|>|<", package_string)
        # check in cache
        if package_data[0] in self.package_list:
            # check if a specific version is required
            if len(package_data) == 2:
                required_version = parse_version(package_data[1])
                installed_version = self.package_list[package_data[0]]
                comparator = package_string.replace(package_data[0], "").replace(package_data[1], "")
                if comparator == "==":
                    return required_version == installed_version
                elif comparator == ">":
                    return installed_version > required_version
                elif comparator == ">=":
                    return installed_version >= required_version
                elif comparator == "<":
                    return installed_version < required_version
                elif comparator == "<=":
                    return installed_version <= required_version
                else:
                    raise ConfigException(f"Illegal comparator found: {comparator}")
            else:
                return True
        return False


def glob(item, workdir, verbose=False):
    new_command = []
    if "*" in item:
        if verbose:
            print(f"[Globbing] Found item: [{item}]")
        dir = os.path.abspath(os.path.join(workdir, os.path.dirname(item)))
        base_name = os.path.basename(item)
        if os.path.isdir(dir):
            item_parts = base_name.split("*")
            for file in os.listdir(dir):
                # TODO: Fix ordering! A*B = B*A = AB*
                if item_parts[0] in file and item_parts[1] in file:
                    new_item = os.path.join(dir, file)
                    if verbose:
                        print(f"[Globbing] Substitute: {new_item}")
                    new_command.append(new_item)
        else:
            raise ConfigException(f"[Globbing] Dir does not exist: {dir}")
        return new_command
    else:
        return [item]


def glob_command(command, workdir, verbose=False):
    if verbose:
        print(f"[Globbing] Starting command: {' '.join(command)}")
    new_command = []
    for item in command:
        new_command += glob(item, workdir, verbose)
    return new_command
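
A short sketch of how the pyutils.py helpers are used, mirroring the calls made from pypirunner.py above. The wildcard paths and the "wheel>=0.40" pin are illustrative, not taken from the pull request, and the ensure calls really install missing packages via pip.

# Illustrative calls; PackageManager installs missing packages before use,
# and glob()/glob_command() expand "*" patterns relative to a workdir.
import sys
from alice.runners.pyutils import PackageManager, glob, glob_command

PackageManager.getInstance().ensure("build")             # same call PyPiRunner.run() makes
PackageManager.getInstance().ensure_more(["twine", "wheel>=0.40"])

files = glob("dist/*.whl", workdir=".", verbose=True)    # requires ./dist to exist
check = glob_command([sys.executable, "-m", "twine", "check", "dist/*"], workdir=".")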