From 0bd47f5ae64faf2513887f508bd3180357b9bf41 Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Sat, 3 Jul 2021 10:07:18 +0200 Subject: [PATCH 1/7] add dot generation --- setup.cfg | 1 + vinca/generate_dot.py | 350 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 351 insertions(+) create mode 100644 vinca/generate_dot.py diff --git a/setup.cfg b/setup.cfg index 7a63591..328aad2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,6 +39,7 @@ console_scripts = vinca = vinca.main:main vinca-glab = vinca.generate_gitlab:main vinca-azure = vinca.generate_azure:main + vinca-dot = vinca.generate_dot:main [flake8] import-order-style = google diff --git a/vinca/generate_dot.py b/vinca/generate_dot.py new file mode 100644 index 0000000..c6263a0 --- /dev/null +++ b/vinca/generate_dot.py @@ -0,0 +1,350 @@ +import networkx as nx +import yaml +import re +import glob +import sys, os +import textwrap +import argparse +from distutils.dir_util import copy_tree +import yaml + + +class folded_unicode(str): + pass + + +class literal_unicode(str): + pass + + +def folded_unicode_representer(dumper, data): + return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=">") + + +def literal_unicode_representer(dumper, data): + return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|") + + +yaml.add_representer(folded_unicode, folded_unicode_representer) +yaml.add_representer(literal_unicode, literal_unicode_representer) + + +azure_linux_script = literal_unicode("""\ +export CI=azure +export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME +export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) +.scripts/run_docker_build.sh""") + +azure_osx_script = literal_unicode(r"""\ +export CI=azure +export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME +export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) +.scripts/build_osx.sh""") + +azure_osx_arm64_script = literal_unicode(r"""\ +export CI=azure +export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME +export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) +.scripts/build_osx_arm64.sh""") + +azure_win_preconfig_script = literal_unicode("""\ +set "CI=azure" +call %CONDA%\\condabin\\conda_hook.bat +call %CONDA%\\condabin\\conda.bat activate base + +:: 2 cores available on Appveyor workers: https://www.appveyor.com/docs/build-environment/#build-vm-configurations +:: CPU_COUNT is passed through conda build: https://github.com/conda/conda-build/pull/1149 +set CPU_COUNT=2 + +set PYTHONUNBUFFERED=1 + +conda config --set show_channel_urls true +conda config --set auto_update_conda false +conda config --set add_pip_as_python_dependency false + +call setup_x64 + +:: Set the conda-build working directory to a smaller path +if "%CONDA_BLD_PATH%" == "" ( + set "CONDA_BLD_PATH=C:\\bld\\" +) + +:: Remove some directories from PATH +set "PATH=%PATH:C:\\ProgramData\\Chocolatey\\bin;=%" +set "PATH=%PATH:C:\\Program Files (x86)\\sbt\\bin;=%" +set "PATH=%PATH:C:\\Rust\\.cargo\\bin;=%" +set "PATH=%PATH:C:\\Program Files\\Git\\usr\\bin;=%" +set "PATH=%PATH:C:\\Program Files\\Git\\cmd;=%" +set "PATH=%PATH:C:\\Program Files\\Git\\mingw64\\bin;=%" +set "PATH=%PATH:C:\\Program Files (x86)\\Subversion\\bin;=%" +set "PATH=%PATH:C:\\Program Files\\CMake\\bin;=%" +set "PATH=%PATH:C:\\Program Files\\OpenSSL\\bin;=%" +set "PATH=%PATH:C:\\Strawberry\\c\\bin;=%" +set "PATH=%PATH:C:\\Strawberry\\perl\\bin;=%" +set "PATH=%PATH:C:\\Strawberry\\perl\\site\\bin;=%" +set "PATH=%PATH:c:\\tools\\php;=%" + +:: On azure, there are libcrypto*.dll & libssl*.dll under +:: C:\\Windows\\System32, which should not be 
there (no vendor dlls in windows folder). +:: They would be found before the openssl libs of the conda environment, so we delete them. +if defined CI ( + DEL C:\\Windows\\System32\\libcrypto-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libcrypto-1_1-x64.dll) + DEL C:\\Windows\\System32\\libssl-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libssl-1_1-x64.dll) +) + +:: Make paths like C:\\hostedtoolcache\\windows\\Ruby\\2.5.7\\x64\\bin garbage +set "PATH=%PATH:ostedtoolcache=%" + +mkdir "%CONDA%\\etc\\conda\\activate.d" + +echo set "CONDA_BLD_PATH=%CONDA_BLD_PATH%" > "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" +echo set "CPU_COUNT=%CPU_COUNT%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" +echo set "PYTHONUNBUFFERED=%PYTHONUNBUFFERED%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" +echo set "PATH=%PATH%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" + +conda info +conda config --show-sources +conda list --show-channel-urls +""") + +azure_win_script = literal_unicode("""\ +setlocal EnableExtensions EnableDelayedExpansion +call %CONDA%\\condabin\\conda_hook.bat +call %CONDA%\\condabin\\conda.bat activate base + +set "FEEDSTOCK_ROOT=%cd%" + +call conda config --append channels defaults +call conda config --add channels conda-forge +call conda config --add channels robostack +call conda config --set channel_priority strict + +:: conda remove --force m2-git + +C:\\Miniconda\\python.exe -m pip install git+https://github.com/mamba-org/boa.git@master +if errorlevel 1 exit 1 + +for %%X in (%CURRENT_RECIPES%) do ( + echo "BUILDING RECIPE %%X" + cd %FEEDSTOCK_ROOT%\\recipes\\%%X\\ + copy %FEEDSTOCK_ROOT%\\conda_build_config.yaml .\\conda_build_config.yaml + boa build . 
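+    REM fail the whole job as soon as one recipe fails to build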
+ if errorlevel 1 exit 1 +) + +anaconda -t %ANACONDA_API_TOKEN% upload "C:\\bld\\win-64\\*.tar.bz2" --force +if errorlevel 1 exit 1 +""") + +parsed_args = None + + +def parse_command_line(argv): + parser = argparse.ArgumentParser( + description="Conda recipe Azure pipeline generator for ROS packages" + ) + + default_dir = "./recipes" + parser.add_argument( + "-d", + "--dir", + dest="dir", + default=default_dir, + help="The recipes directory to process (default: {}).".format(default_dir), + ) + + parser.add_argument( + "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure" + ) + + parser.add_argument( + "-p", + "--platform", + dest="platform", + default="linux-64", + help="Platform to emit build pipeline for", + ) + + parser.add_argument( + "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?") + + arguments = parser.parse_args(argv[1:]) + global parsed_args + parsed_args = arguments + return arguments + + +def normalize_name(s): + s = s.replace("-", "_") + return re.sub("[^a-zA-Z0-9_]+", "", s) + + +def batch_stages(stages, max_batch_size=5): + with open("vinca.yaml", "r") as vinca_yaml: + vinca_conf = yaml.safe_load(vinca_yaml) + + # this reduces the number of individual builds to try to save some time + stage_lengths = [len(s) for s in stages] + merged_stages = [] + curr_stage = [] + build_individually = vinca_conf.get("build_in_own_azure_stage", []) + + def chunks(lst, n): + """Yield successive n-sized chunks from lst.""" + for i in range(0, len(lst), n): + yield lst[i:i + n] + i = 0 + while i < len(stages): + for build_individually_pkg in build_individually: + if build_individually_pkg in stages[i]: + merged_stages.append([[build_individually_pkg]]) + stages[i].remove(build_individually_pkg) + + if stage_lengths[i] < max_batch_size and len(curr_stage) + stage_lengths[i] < max_batch_size: + # merge with previous stage + curr_stage += stages[i] + else: + if len(curr_stage): + merged_stages.append([curr_stage]) + curr_stage = [] + if stage_lengths[i] < max_batch_size: + curr_stage += stages[i] + else: + # split this stage into multiple + merged_stages.append(list(chunks(stages[i], max_batch_size))) + i += 1 + if len(curr_stage): + merged_stages.append([curr_stage]) + return merged_stages + +import requests + +def get_skip_existing(vinca_conf, platform): + fn = vinca_conf.get("skip_existing") + repodatas = [] + if fn is not None: + fns = list(fn) + else: + fns = [] + for fn in fns: + selected_bn = None + if "://" in fn: + fn += f"{platform}/repodata.json" + print(f"Fetching repodata: {fn}") + request = requests.get(fn) + + repodata = request.json() + repodatas.append(repodata) + else: + import json + with open(fn) as fi: + repodata = json.load(fi) + repodatas.append(repodata) + + return repodatas + +def add_additional_recipes(args): + additional_recipes_path = os.path.abspath(os.path.join(args.dir, '..', 'additional_recipes')) + + print("Searching additional recipes in ", additional_recipes_path) + + if not os.path.exists(additional_recipes_path): + return + + with open("vinca.yaml", "r") as vinca_yaml: + vinca_conf = yaml.safe_load(vinca_yaml) + + repodatas = get_skip_existing(vinca_conf, args.platform) + + for recipe_path in glob.glob(additional_recipes_path + '/**/recipe.yaml'): + with open(recipe_path) as recipe: + additional_recipe = yaml.safe_load(recipe) + + name, version, bnumber = (additional_recipe["package"]["name"], additional_recipe["package"]["version"], additional_recipe["build"]["number"]) + print("Checking 
if ", name, version, bnumber, " exists") + skip = False + for repo in repodatas: + for key, pkg in repo.get("packages", {}).items(): + if pkg["name"] == name and pkg["version"] == version and pkg["build_number"] == bnumber: + skip = True + print(f"{name}=={version}=={bnumber} already exists. Skipping.") + break + + if not skip: + print("Adding ", os.path.dirname(recipe_path)) + goal_folder = os.path.join(args.dir, name) + os.makedirs(goal_folder, exist_ok=True) + copy_tree(os.path.dirname(recipe_path), goal_folder) + + +def main(): + + args = parse_command_line(sys.argv) + + metas = [] + + if args.additional_recipes: + add_additional_recipes(args) + + if not os.path.exists(args.dir): + print(f"{args.dir} not found. Not generating a pipeline.") + + all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml")) + for f in all_recipes: + with open(f) as fi: + metas.append(yaml.safe_load(fi.read())) + + if len(metas) >= 1: + requirements = {} + + for pkg in metas: + requirements[pkg["package"]["name"]] = ( + pkg["requirements"].get("host", []) + pkg["requirements"].get("run", []) + ) + + # sort out requirements that are not built in this run + for pkg_name, reqs in requirements.items(): + requirements[pkg_name] = [ + r.split()[0] for r in reqs if (isinstance(r, str) and r in reqs) + ] + print(requirements) + + G = nx.DiGraph() + for pkg, reqs in requirements.items(): + G.add_node(pkg) + for r in reqs: + if r.startswith("ros-"): + G.add_edge(pkg, r) + + import matplotlib.pyplot as plt + from networkx.drawing.nx_agraph import write_dot + + nx.draw(G, with_labels=True, font_weight='bold') + plt.show() + + write_dot(G, "grid.dot") + + tg = list(reversed(list(nx.topological_sort(G)))) + + stages = [] + current_stage = [] + for pkg in tg: + reqs = requirements.get(pkg, []) + sort_in_stage = 0 + for r in reqs: + # sort up the stages, until first stage found where all requirements are fulfilled. 
+ for sidx, stage in enumerate(stages): + if r in stages[sidx]: + sort_in_stage = max(sidx + 1, sort_in_stage) + + # if r in current_stage: + # stages.append(current_stage) + # current_stage = [] + if sort_in_stage >= len(stages): + stages.append([pkg]) + else: + stages[sort_in_stage].append(pkg) + # current_stage.append(pkg) + + if len(current_stage): + stages.append(current_stage) From ac20226e046a1925a8103d047c1752830183ff85 Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Tue, 6 Jul 2021 19:58:56 +0200 Subject: [PATCH 2/7] add migrator --- vinca/migrate.py | 82 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 vinca/migrate.py diff --git a/vinca/migrate.py b/vinca/migrate.py new file mode 100644 index 0000000..21bd306 --- /dev/null +++ b/vinca/migrate.py @@ -0,0 +1,82 @@ +import json +import requests +import networkx as nx + +packages_to_migrate = ["libopencv"] +distro = "noetic" + +arches = ["linux-64", "linux-aarch64", "win-64", "osx-64", "osx-arm64"] +arches = ["linux-64"] + +for arch in arches: + url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json" + print("URL: ", url) + # return + repodata = requests.get(url).json() + packages = repodata["packages"] + to_migrate = set() + ros_pkgs = set() + ros_prefix = f"ros-{distro}" + for pkey in packages: + if not pkey.startswith(ros_prefix): + continue + + pname = pkey.rsplit('-', 2)[0] + ros_pkgs.add(pname) + + p = packages[pkey] + + for d in p.get("depends", []): + if d.split()[0] in packages_to_migrate: + # print(f"need to migrate {pkey}") + to_migrate.add(pname) + + # print(to_migrate) + # print(ros_pkgs) + + latest = {} + for pkg in ros_pkgs: + current = current_version = None + for pkey in packages: + if packages[pkey]["name"] == pkg: + tmp = packages[pkey]["version"].split('.') + version = [] + for el in tmp: + if el.isdecimal(): + version.append(int(el)) + else: + x = re.search(r'[^0-9]', version).start() + version.append(int(el[:x])) + + version = tuple(version) + + if not current or version > current_version: + current_version = version + current = pkey + latest[pkg] = current + + # print(latest) + + # now we can build the graph ... 
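+    # edges point from each package to its ros-* dependencies, so the reversed
+    # topological sort below rebuilds dependencies before their dependents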
+ + G = nx.DiGraph() + for pkg, pkgkey in latest.items(): + full_pkg = packages[pkgkey] + for dep in full_pkg.get("depends", []): + req = dep.split(' ')[0] + G.add_node(pkg) + if req.startswith(ros_prefix): + G.add_edge(pkg, req) + + gsorted = nx.topological_sort(G) + gsorted = list(reversed([g for g in gsorted])) + + to_migrate = sorted(to_migrate, key=lambda x: gsorted.index(x)) + + print("Sorted to migrate: ", to_migrate) + + # import matplotlib.pyplot as plt + # nx.draw(G, with_labels=True, font_weight='bold') + # plt.show() + + From 51976bf3aa347234cde137c9d82f9f75a2994ba5 Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Tue, 6 Jul 2021 21:19:36 +0200 Subject: [PATCH 3/7] add more code to generate migrations --- vinca/generate_azure.py | 332 ++++++++++++++++++++++++---------------- vinca/main.py | 9 +- vinca/migrate.py | 36 ++++- 3 files changed, 241 insertions(+), 136 deletions(-) diff --git a/vinca/generate_azure.py b/vinca/generate_azure.py index e4c5511..5ecbec8 100644 --- a/vinca/generate_azure.py +++ b/vinca/generate_azure.py @@ -7,6 +7,7 @@ import argparse from distutils.dir_util import copy_tree import yaml +import requests class folded_unicode(str): @@ -279,93 +280,7 @@ def add_additional_recipes(args): os.makedirs(goal_folder, exist_ok=True) copy_tree(os.path.dirname(recipe_path), goal_folder) - -def main(): - - args = parse_command_line(sys.argv) - - metas = [] - - if args.additional_recipes: - add_additional_recipes(args) - - if not os.path.exists(args.dir): - print(f"{args.dir} not found. Not generating a pipeline.") - - all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml")) - for f in all_recipes: - with open(f) as fi: - metas.append(yaml.safe_load(fi.read())) - - if len(metas) >= 1: - requirements = {} - - for pkg in metas: - requirements[pkg["package"]["name"]] = ( - pkg["requirements"].get("host", []) + pkg["requirements"].get("run", []) - ) - - # sort out requirements that are not built in this run - for pkg_name, reqs in requirements.items(): - requirements[pkg_name] = [ - r.split()[0] for r in reqs if (isinstance(r, str) and r in reqs) - ] - print(requirements) - - G = nx.DiGraph() - for pkg, reqs in requirements.items(): - G.add_node(pkg) - for r in reqs: - if r.startswith("ros-"): - G.add_edge(pkg, r) - - # import matplotlib.pyplot as plt - # nx.draw(G, with_labels=True, font_weight='bold') - # plt.show() - - tg = list(reversed(list(nx.topological_sort(G)))) - - stages = [] - current_stage = [] - for pkg in tg: - reqs = requirements.get(pkg, []) - sort_in_stage = 0 - for r in reqs: - # sort up the stages, until first stage found where all requirements are fulfilled. 
- for sidx, stage in enumerate(stages): - if r in stages[sidx]: - sort_in_stage = max(sidx + 1, sort_in_stage) - - # if r in current_stage: - # stages.append(current_stage) - # current_stage = [] - if sort_in_stage >= len(stages): - stages.append([pkg]) - else: - stages[sort_in_stage].append(pkg) - # current_stage.append(pkg) - - if len(current_stage): - stages.append(current_stage) - elif len(metas) == 1: - fn_wo_yaml = os.path.splitext(os.path.basename(all_recipes[0]))[0] - stages = [[fn_wo_yaml]] - requirements = [fn_wo_yaml] - else: - stages = [] - requirements = [] - - - # filter out packages that we are not actually building - filtered_stages = [] - for stage in stages: - filtered = [pkg for pkg in stage if pkg in requirements] - if len(filtered): - filtered_stages.append(filtered) - - stages = batch_stages(filtered_stages) - print(stages) - +def build_linux(stages, trigger_branch): # Build Linux pipeline azure_template = {"pool": {"vmImage": "ubuntu-latest"}} @@ -400,20 +315,26 @@ def main(): # all packages skipped ... azure_stages.append(stage) - azure_template["trigger"] = [args.trigger_branch] + azure_template["trigger"] = [trigger_branch] azure_template["pr"] = "none" if azure_stages: azure_template["stages"] = azure_stages - if args.platform == "linux-64" and len(azure_stages): + if len(azure_stages): with open("linux.yml", "w") as fo: fo.write(yaml.dump(azure_template, sort_keys=False)) - # Build OSX pipeline - azure_template = {"pool": {"vmImage": "macOS-10.15"}} +def build_win(stages, trigger_branch): + # windows + azure_template = {"pool": {"vmImage": "vs2017-win2016"}} azure_stages = [] + global azure_win_script + if os.path.exists(".scripts/build_win.bat"): + with open(".scripts/build_win.bat", "r") as fi: + azure_win_script = literal_unicode(fi.read()) + stage_names = [] for i, s in enumerate(stages): stage_name = f"stage_{i}" @@ -425,15 +346,29 @@ def main(): stage["jobs"].append( { "job": f"stage_{i}_job_{len(stage['jobs'])}", + "variables": {"CONDA_BLD_PATH": "C:\\\\bld\\\\"}, "steps": [ { - "script": azure_osx_script, + "powershell": 'Write-Host "##vso[task.prependpath]$env:CONDA\\Scripts"', + "displayName": "Add conda to PATH" + }, + { + "script": 'conda install -c conda-forge --yes --quiet conda-build pip mamba ruamel.yaml anaconda-client', + "displayName": "Install conda-build, boa and activate environment" + }, + { + "script": azure_win_preconfig_script, + "displayName": "conda-forge build setup", + }, + { + "script": azure_win_script, "env": { "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)", - "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}" + "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}", + "PYTHONUNBUFFERED": 1, }, "displayName": f"Build {' '.join([pkg for pkg in batch])}", - } + }, ], } ) @@ -442,16 +377,17 @@ def main(): # all packages skipped ... 
azure_stages.append(stage) - azure_template["trigger"] = [args.trigger_branch] + azure_template["trigger"] = [trigger_branch] azure_template["pr"] = "none" if azure_stages: azure_template["stages"] = azure_stages - if args.platform == "osx-64" and len(azure_stages): - with open("osx.yml", "w") as fo: + if len(azure_stages): + with open("win.yml", "w") as fo: fo.write(yaml.dump(azure_template, sort_keys=False)) - # Build OSX-arm64 pipeline +def build_osx(stages, trigger_branch): + # Build OSX pipeline azure_template = {"pool": {"vmImage": "macOS-10.15"}} azure_stages = [] @@ -469,7 +405,7 @@ def main(): "job": f"stage_{i}_job_{len(stage['jobs'])}", "steps": [ { - "script": azure_osx_arm64_script, + "script": azure_osx_script, "env": { "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)", "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}" @@ -484,15 +420,16 @@ def main(): # all packages skipped ... azure_stages.append(stage) - azure_template["trigger"] = [args.trigger_branch] + azure_template["trigger"] = [trigger_branch] azure_template["pr"] = "none" if azure_stages: azure_template["stages"] = azure_stages - if args.platform == "osx-arm64" and len(azure_stages): - with open("osx_arm64.yml", "w") as fo: + if len(azure_stages): + with open("osx.yml", "w") as fo: fo.write(yaml.dump(azure_template, sort_keys=False)) +def build_linux_aarch64(stages, trigger_branch): # Build aarch64 pipeline azure_template = { "pool": { @@ -532,25 +469,20 @@ def main(): # all packages skipped ... azure_stages.append(stage) - azure_template["trigger"] = [args.trigger_branch] + azure_template["trigger"] = [trigger_branch] azure_template["pr"] = "none" if azure_stages: azure_template["stages"] = azure_stages - if args.platform == "linux-aarch64" and len(azure_stages): + if len(azure_stages): with open("linux_aarch64.yml", "w") as fo: fo.write(yaml.dump(azure_template, sort_keys=False)) - # windows - azure_template = {"pool": {"vmImage": "windows-2019"}} - +def build_osx_arm64(stages, trigger_branch): + # Build OSX-arm64 pipeline + azure_template = {"pool": {"vmImage": "macOS-10.15"}} azure_stages = [] - global azure_win_script - if os.path.exists(".scripts/build_win.bat"): - with open(".scripts/build_win.bat", "r") as fi: - azure_win_script = literal_unicode(fi.read()) - stage_names = [] for i, s in enumerate(stages): stage_name = f"stage_{i}" @@ -562,29 +494,15 @@ def main(): stage["jobs"].append( { "job": f"stage_{i}_job_{len(stage['jobs'])}", - "variables": {"CONDA_BLD_PATH": "C:\\\\bld\\\\"}, "steps": [ { - "powershell": 'Write-Host "##vso[task.prependpath]$env:CONDA\\Scripts"', - "displayName": "Add conda to PATH" - }, - { - "script": 'conda install -c conda-forge --yes --quiet conda-build pip mamba ruamel.yaml anaconda-client', - "displayName": "Install conda-build, boa and activate environment" - }, - { - "script": azure_win_preconfig_script, - "displayName": "conda-forge build setup", - }, - { - "script": azure_win_script, + "script": azure_osx_arm64_script, "env": { "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)", - "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}", - "PYTHONUNBUFFERED": 1, + "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}" }, "displayName": f"Build {' '.join([pkg for pkg in batch])}", - }, + } ], } ) @@ -593,11 +511,161 @@ def main(): # all packages skipped ... 
azure_stages.append(stage) - azure_template["trigger"] = [args.trigger_branch] + azure_template["trigger"] = [trigger_branch] azure_template["pr"] = "none" if azure_stages: azure_template["stages"] = azure_stages - if args.platform.startswith("win") and len(azure_stages): - with open("win.yml", "w") as fo: + if len(azure_stages): + with open("osx_arm64.yml", "w") as fo: fo.write(yaml.dump(azure_template, sort_keys=False)) + + +def extend_graph(graph, arch='linux-64'): + url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json" + repodata = requests.get(url).json() + + latest = {} + ros_pkgs = set() + distro = "noetic" + ros_prefix = f"ros-{distro}" + packages = repodata.get("packages", {}) + + for pkey in packages: + if not pkey.startswith(ros_prefix): + continue + + pname = pkey.rsplit('-', 2)[0] + ros_pkgs.add(pname) + + for pkg in ros_pkgs: + current = current_version = None + for pkey in packages: + if packages[pkey]["name"] == pkg: + tmp = packages[pkey]["version"].split('.') + version = [] + for el in tmp: + if el.isdecimal(): + version.append(int(el)) + else: + x = re.search(r'[^0-9]', version).start() + version.append(int(el[:x])) + + version = tuple(version) + + if not current or version > current_version: + current_version = version + current = pkey + + latest[pkg] = current + + for pkg, pkgkey in latest.items(): + full_pkg = packages[pkgkey] + for dep in full_pkg.get("depends", []): + req = dep.split(' ')[0] + graph.add_node(pkg) + if req.startswith(ros_prefix): + graph.add_edge(pkg, req) + +def main(): + + args = parse_command_line(sys.argv) + + metas = [] + + if args.additional_recipes: + add_additional_recipes(args) + + if not os.path.exists(args.dir): + print(f"{args.dir} not found. Not generating a pipeline.") + + all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml")) + for f in all_recipes: + with open(f) as fi: + metas.append(yaml.safe_load(fi.read())) + + if len(metas) >= 1: + requirements = {} + + for pkg in metas: + requirements[pkg["package"]["name"]] = ( + pkg["requirements"].get("host", []) + pkg["requirements"].get("run", []) + ) + + G = nx.DiGraph() + for pkg, reqs in requirements.items(): + G.add_node(pkg) + for r in reqs: + if not isinstance(r, str): + continue + if r.startswith("ros-"): + G.add_edge(pkg, r) + + extend_graph(G, arch=args.platform) + # import matplotlib.pyplot as plt + # nx.draw(G, with_labels=True, font_weight='bold') + # plt.show() + + tg = list(reversed(list(nx.topological_sort(G)))) + print("Fully sorted graph: ", tg) + + recipes = list(requirements.keys()) + tg = sorted(recipes, key=lambda x: tg.index(x)) + print("SUBGRAPH OF INTEREST: ", tg) + + # # sort out requirements that are not built in this run + # for pkg_name in tg: + # requirements[pkg_name] = [ + # r.split()[0] for r in reqs if (isinstance(r, str) and r in tg) + # ] + + stages = [] + current_stage = [] + for pkg in tg: + reqs = requirements.get(pkg, []) + sort_in_stage = 0 + for r in reqs: + # sort up the stages, until first stage found where all requirements are fulfilled. 
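+            # scan all existing stages for this package's requirements; the
+            # deepest hit forces the package into the following stage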
+ for sidx, stage in enumerate(stages): + if r in stages[sidx]: + sort_in_stage = max(sidx + 1, sort_in_stage) + + # if r in current_stage: + # stages.append(current_stage) + # current_stage = [] + if sort_in_stage >= len(stages): + stages.append([pkg]) + else: + stages[sort_in_stage].append(pkg) + # current_stage.append(pkg) + + if len(current_stage): + stages.append(current_stage) + elif len(metas) == 1: + fn_wo_yaml = os.path.splitext(os.path.basename(all_recipes[0]))[0] + stages = [[fn_wo_yaml]] + requirements = [fn_wo_yaml] + else: + stages = [] + requirements = [] + + # filter out packages that we are not actually building + filtered_stages = [] + for stage in stages: + filtered = [pkg for pkg in stage if pkg in requirements] + if len(filtered): + filtered_stages.append(filtered) + + stages = batch_stages(filtered_stages) + print(stages) + + if args.platform == "linux-64": + build_linux(stages, args.trigger_branch) + elif args.platform == "linux-aarch64": + build_linux_aarch64(stages, args.trigger_branch) + elif args.platform == "osx-64": + build_osx(stages, args.trigger_branch) + elif args.platform == "osx-arm64": + build_osx_arm64(stages, args.trigger_branch) + elif args.platform == "win-64": + build_win(stages, args.trigger_branch) diff --git a/vinca/main.py b/vinca/main.py index d1ed4aa..1efeaad 100644 --- a/vinca/main.py +++ b/vinca/main.py @@ -87,6 +87,13 @@ def parse_command_line(argv): default=default_dir, help="The directory to process (default: {}).".format(default_dir), ) + parser.add_argument( + "-f", + "--file", + dest="file", + default="vinca.yaml", + help="The vinca file to process (default: vinca.yaml)", + ) parser.add_argument( "-s", "--skip", @@ -738,7 +745,7 @@ def main(): arguments = parse_command_line(sys.argv) base_dir = os.path.abspath(arguments.dir) - vinca_yaml = os.path.join(base_dir, "vinca.yaml") + vinca_yaml = os.path.join(base_dir, arguments.file) vinca_conf = read_vinca_yaml(vinca_yaml) vinca_conf["_conda_indexes"] = get_conda_index(vinca_conf, base_dir) diff --git a/vinca/migrate.py b/vinca/migrate.py index 21bd306..55da182 100644 --- a/vinca/migrate.py +++ b/vinca/migrate.py @@ -2,12 +2,24 @@ import requests import networkx as nx +from vinca.distro import Distro + packages_to_migrate = ["libopencv"] -distro = "noetic" +distro_version = "noetic" +ros_prefix = f"ros-{distro_version}" arches = ["linux-64", "linux-aarch64", "win-64", "osx-64", "osx-arm64"] arches = ["linux-64"] +def to_ros_name(distro, pkg_name): + shortname = pkg_name[len(ros_prefix) + 1:] + if distro.check_package(shortname): + return shortname + elif distro.check_package(shortname.replace('-', '_')): + return shortname.replace('-', '_') + else: + raise RuntimeError(f"Couldnt convert {pkg_name} to ROS pkg name") + for arch in arches: url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json" print("URL: ", url) @@ -16,7 +28,6 @@ packages = repodata["packages"] to_migrate = set() ros_pkgs = set() - ros_prefix = f"ros-{distro}" for pkey in packages: if not pkey.startswith(ros_prefix): continue @@ -74,7 +85,26 @@ to_migrate = sorted(to_migrate, key=lambda x: gsorted.index(x)) print("Sorted to migrate: ", to_migrate) - + + distro = Distro(distro_version) + # import IPython; IPython.embed() + + ros_names = [] + for pkg in to_migrate: + ros_names.append(to_ros_name(distro, pkg)) + print("Final names: ", ros_names) + + from vinca.main import read_vinca_yaml + import ruamel.yaml + yaml = ruamel.yaml.YAML() + with open("vinca.yaml", "r") as fi: + vinca_conf = yaml.load(fi) + + 
vinca_conf["packages_select_by_deps"] = ros_names + vinca_conf["skip_all_deps"] = True + with open("vinca_generated.yaml", "w") as fo: + yaml.dump(vinca_conf, fo) + # import matplotlib.pyplot as plt # nx.draw(G, with_labels=True, font_weight='bold') # plt.show() From 4cce52211dcd57e5906b1647c53fada3cae3e775 Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Fri, 23 Jul 2021 17:34:48 +0200 Subject: [PATCH 4/7] add vinca-migrate function --- setup.cfg | 1 + vinca/generate_azure.py | 151 +++++++++++++++++++++++----------------- vinca/migrate.py | 68 +++++++++++++++--- 3 files changed, 146 insertions(+), 74 deletions(-) diff --git a/setup.cfg b/setup.cfg index 328aad2..7a0a646 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,6 +40,7 @@ console_scripts = vinca-glab = vinca.generate_gitlab:main vinca-azure = vinca.generate_azure:main vinca-dot = vinca.generate_dot:main + vinca-migrate = vinca.migrate:main [flake8] import-order-style = google diff --git a/vinca/generate_azure.py b/vinca/generate_azure.py index 5ecbec8..ec20af1 100644 --- a/vinca/generate_azure.py +++ b/vinca/generate_azure.py @@ -143,40 +143,6 @@ def literal_unicode_representer(dumper, data): parsed_args = None -def parse_command_line(argv): - parser = argparse.ArgumentParser( - description="Conda recipe Azure pipeline generator for ROS packages" - ) - - default_dir = "./recipes" - parser.add_argument( - "-d", - "--dir", - dest="dir", - default=default_dir, - help="The recipes directory to process (default: {}).".format(default_dir), - ) - - parser.add_argument( - "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure" - ) - - parser.add_argument( - "-p", - "--platform", - dest="platform", - default="linux-64", - help="Platform to emit build pipeline for", - ) - - parser.add_argument( - "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?") - - arguments = parser.parse_args(argv[1:]) - global parsed_args - parsed_args = arguments - return arguments - def normalize_name(s): s = s.replace("-", "_") @@ -197,6 +163,7 @@ def chunks(lst, n): """Yield successive n-sized chunks from lst.""" for i in range(0, len(lst), n): yield lst[i:i + n] + i = 0 while i < len(stages): for build_individually_pkg in build_individually: @@ -247,8 +214,8 @@ def get_skip_existing(vinca_conf, platform): return repodatas -def add_additional_recipes(args): - additional_recipes_path = os.path.abspath(os.path.join(args.dir, '..', 'additional_recipes')) +def add_additional_recipes(recipe_dir, platform): + additional_recipes_path = os.path.abspath(os.path.join(recipe_dir, '..', 'additional_recipes')) print("Searching additional recipes in ", additional_recipes_path) @@ -258,7 +225,10 @@ def add_additional_recipes(args): with open("vinca.yaml", "r") as vinca_yaml: vinca_conf = yaml.safe_load(vinca_yaml) - repodatas = get_skip_existing(vinca_conf, args.platform) + if vinca_conf.get("is_migration"): + return + + repodatas = get_skip_existing(vinca_conf, platform) for recipe_path in glob.glob(additional_recipes_path + '/**/recipe.yaml'): with open(recipe_path) as recipe: @@ -276,7 +246,7 @@ def add_additional_recipes(args): if not skip: print("Adding ", os.path.dirname(recipe_path)) - goal_folder = os.path.join(args.dir, name) + goal_folder = os.path.join(recipe_dir, name) os.makedirs(goal_folder, exist_ok=True) copy_tree(os.path.dirname(recipe_path), goal_folder) @@ -521,7 +491,7 @@ def build_osx_arm64(stages, trigger_branch): fo.write(yaml.dump(azure_template, sort_keys=False)) -def 
extend_graph(graph, arch='linux-64'): +def extend_graph(graph, arch='linux-64', distro='noetic'): url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json" repodata = requests.get(url).json() @@ -567,19 +537,9 @@ def extend_graph(graph, arch='linux-64'): if req.startswith(ros_prefix): graph.add_edge(pkg, req) -def main(): - - args = parse_command_line(sys.argv) - +def generate_pipeline(recipe_dir, platform, trigger_branch, sequential=False): metas = [] - - if args.additional_recipes: - add_additional_recipes(args) - - if not os.path.exists(args.dir): - print(f"{args.dir} not found. Not generating a pipeline.") - - all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml")) + all_recipes = glob.glob(os.path.join(recipe_dir, "**", "*.yaml")) for f in all_recipes: with open(f) as fi: metas.append(yaml.safe_load(fi.read())) @@ -601,7 +561,7 @@ def main(): if r.startswith("ros-"): G.add_edge(pkg, r) - extend_graph(G, arch=args.platform) + extend_graph(G, arch=platform) # import matplotlib.pyplot as plt # nx.draw(G, with_labels=True, font_weight='bold') # plt.show() @@ -656,16 +616,77 @@ def main(): if len(filtered): filtered_stages.append(filtered) - stages = batch_stages(filtered_stages) - print(stages) - - if args.platform == "linux-64": - build_linux(stages, args.trigger_branch) - elif args.platform == "linux-aarch64": - build_linux_aarch64(stages, args.trigger_branch) - elif args.platform == "osx-64": - build_osx(stages, args.trigger_branch) - elif args.platform == "osx-arm64": - build_osx_arm64(stages, args.trigger_branch) - elif args.platform == "win-64": - build_win(stages, args.trigger_branch) + if sequential: + single_stage = [] + for s in filtered_stages: + single_stage.extend(s) + stages = [[single_stage]] + else: + stages = batch_stages(filtered_stages) + + if platform == "linux-64": + build_linux(stages, trigger_branch) + elif platform == "linux-aarch64": + build_linux_aarch64(stages, trigger_branch) + elif platform == "osx-64": + build_osx(stages, trigger_branch) + elif platform == "osx-arm64": + build_osx_arm64(stages, trigger_branch) + elif platform == "win-64": + build_win(stages, trigger_branch) + + +def parse_command_line(argv): + parser = argparse.ArgumentParser( + description="Conda recipe Azure pipeline generator for ROS packages" + ) + + default_dir = "./recipes" + parser.add_argument( + "-d", + "--dir", + dest="dir", + default=default_dir, + help="The recipes directory to process (default: {}).".format(default_dir), + ) + parser.add_argument( + "--sequential", + dest="sequential", + action="store_true", + help="Don't parallelize stages", + ) + parser.add_argument( + "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure" + ) + + parser.add_argument( + "-p", + "--platform", + dest="platform", + default="linux-64", + help="Platform to emit build pipeline for", + ) + + parser.add_argument( + "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?") + + arguments = parser.parse_args(argv[1:]) + global parsed_args + parsed_args = arguments + return arguments + + +def main(): + + args = parse_command_line(sys.argv) + + metas = [] + + if not os.path.exists(args.dir): + print(f"{args.dir} not found. 
Not generating a pipeline.") + return + + if args.additional_recipes: + add_additional_recipes(args.dir, args.platform) + + generate_pipeline(args.dir, args.platform, args.trigger_branch, args.sequential) \ No newline at end of file diff --git a/vinca/migrate.py b/vinca/migrate.py index 55da182..5aa18c6 100644 --- a/vinca/migrate.py +++ b/vinca/migrate.py @@ -1,6 +1,11 @@ -import json +import yaml +import sys, os +import glob +import argparse import requests import networkx as nx +import subprocess +import shutil from vinca.distro import Distro @@ -9,7 +14,6 @@ ros_prefix = f"ros-{distro_version}" arches = ["linux-64", "linux-aarch64", "win-64", "osx-64", "osx-arm64"] -arches = ["linux-64"] def to_ros_name(distro, pkg_name): shortname = pkg_name[len(ros_prefix) + 1:] @@ -20,7 +24,7 @@ def to_ros_name(distro, pkg_name): else: raise RuntimeError(f"Couldnt convert {pkg_name} to ROS pkg name") -for arch in arches: +def create_migration_instructions(arch, packages_to_migrate, trigger_branch): url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json" print("URL: ", url) # return @@ -102,11 +106,57 @@ def to_ros_name(distro, pkg_name): vinca_conf["packages_select_by_deps"] = ros_names vinca_conf["skip_all_deps"] = True - with open("vinca_generated.yaml", "w") as fo: - yaml.dump(vinca_conf, fo) - - # import matplotlib.pyplot as plt - # nx.draw(G, with_labels=True, font_weight='bold') - # plt.show() + vinca_conf["is_migration"] = True + with open("vinca.yaml", "w") as fo: + yaml.dump(vinca_conf, fo) + if os.path.exists("recipes"): + shutil.rmtree("recipes") + + subprocess.check_call(["vinca", "-f", "vinca.yaml", "--multiple", "--platform", arch]) + subprocess.check_call(["vinca-azure", "--platform", arch, "--trigger-branch", "buildbranch_linux", "-d", "./recipes", "--additional-recipes", "--sequential"]) + +def parse_command_line(argv): + parser = argparse.ArgumentParser( + description="Conda recipe Azure pipeline generator for ROS packages" + ) + + default_dir = "./recipes" + parser.add_argument( + "-d", + "--dir", + dest="dir", + default=default_dir, + help="The recipes directory to process (default: {}).".format(default_dir), + ) + + parser.add_argument( + "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure" + ) + + parser.add_argument( + "-p", + "--platform", + dest="platform", + default="linux-64", + help="Platform to emit build pipeline for", + ) + + parser.add_argument( + "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?") + + arguments = parser.parse_args(argv[1:]) + global parsed_args + parsed_args = arguments + return arguments + + +def main(): + args = parse_command_line(sys.argv) + + mfile = os.path.join(args.dir + "/migration.yaml") + with open(mfile, "r") as fi: + migration = yaml.safe_load(fi) + print(migration) + create_migration_instructions(args.platform, migration.get('packages', []), args.trigger_branch) \ No newline at end of file From 0a41b900c1c0aee32e13bdb988b14a70e3fd3e71 Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Fri, 23 Jul 2021 17:40:46 +0200 Subject: [PATCH 5/7] remove tapi for now --- vinca/main.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vinca/main.py b/vinca/main.py index 1efeaad..d8607de 100644 --- a/vinca/main.py +++ b/vinca/main.py @@ -222,7 +222,8 @@ def generate_output(pkg_shortname, vinca_conf, distro, version, all_pkgs=[]): "{{ compiler('c') }}", "ninja", {"sel(unix)": "make"}, - {"sel(osx)": "tapi"}, + # let's figure out if we need this, was 
added for ROS2 + # {"sel(osx)": "tapi"}, "cmake", {"sel(build_platform != target_platform)": "python"}, {"sel(build_platform != target_platform)": "cross-python_{{ target_platform }}"}, From abd07b58ed6c3d2308255e2f39308e45cb49109f Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Fri, 23 Jul 2021 17:54:58 +0200 Subject: [PATCH 6/7] simplify dot generator --- vinca/generate_dot.py | 177 +----------------------------------------- 1 file changed, 2 insertions(+), 175 deletions(-) diff --git a/vinca/generate_dot.py b/vinca/generate_dot.py index c6263a0..a48e605 100644 --- a/vinca/generate_dot.py +++ b/vinca/generate_dot.py @@ -7,141 +7,14 @@ import argparse from distutils.dir_util import copy_tree import yaml - - -class folded_unicode(str): - pass - - -class literal_unicode(str): - pass - - -def folded_unicode_representer(dumper, data): - return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=">") - - -def literal_unicode_representer(dumper, data): - return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|") - - -yaml.add_representer(folded_unicode, folded_unicode_representer) -yaml.add_representer(literal_unicode, literal_unicode_representer) - - -azure_linux_script = literal_unicode("""\ -export CI=azure -export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME -export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) -.scripts/run_docker_build.sh""") - -azure_osx_script = literal_unicode(r"""\ -export CI=azure -export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME -export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) -.scripts/build_osx.sh""") - -azure_osx_arm64_script = literal_unicode(r"""\ -export CI=azure -export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME -export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) -.scripts/build_osx_arm64.sh""") - -azure_win_preconfig_script = literal_unicode("""\ -set "CI=azure" -call %CONDA%\\condabin\\conda_hook.bat -call %CONDA%\\condabin\\conda.bat activate base - -:: 2 cores available on Appveyor workers: https://www.appveyor.com/docs/build-environment/#build-vm-configurations -:: CPU_COUNT is passed through conda build: https://github.com/conda/conda-build/pull/1149 -set CPU_COUNT=2 - -set PYTHONUNBUFFERED=1 - -conda config --set show_channel_urls true -conda config --set auto_update_conda false -conda config --set add_pip_as_python_dependency false - -call setup_x64 - -:: Set the conda-build working directory to a smaller path -if "%CONDA_BLD_PATH%" == "" ( - set "CONDA_BLD_PATH=C:\\bld\\" -) - -:: Remove some directories from PATH -set "PATH=%PATH:C:\\ProgramData\\Chocolatey\\bin;=%" -set "PATH=%PATH:C:\\Program Files (x86)\\sbt\\bin;=%" -set "PATH=%PATH:C:\\Rust\\.cargo\\bin;=%" -set "PATH=%PATH:C:\\Program Files\\Git\\usr\\bin;=%" -set "PATH=%PATH:C:\\Program Files\\Git\\cmd;=%" -set "PATH=%PATH:C:\\Program Files\\Git\\mingw64\\bin;=%" -set "PATH=%PATH:C:\\Program Files (x86)\\Subversion\\bin;=%" -set "PATH=%PATH:C:\\Program Files\\CMake\\bin;=%" -set "PATH=%PATH:C:\\Program Files\\OpenSSL\\bin;=%" -set "PATH=%PATH:C:\\Strawberry\\c\\bin;=%" -set "PATH=%PATH:C:\\Strawberry\\perl\\bin;=%" -set "PATH=%PATH:C:\\Strawberry\\perl\\site\\bin;=%" -set "PATH=%PATH:c:\\tools\\php;=%" - -:: On azure, there are libcrypto*.dll & libssl*.dll under -:: C:\\Windows\\System32, which should not be there (no vendor dlls in windows folder). -:: They would be found before the openssl libs of the conda environment, so we delete them. 
-if defined CI ( - DEL C:\\Windows\\System32\\libcrypto-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libcrypto-1_1-x64.dll) - DEL C:\\Windows\\System32\\libssl-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libssl-1_1-x64.dll) -) - -:: Make paths like C:\\hostedtoolcache\\windows\\Ruby\\2.5.7\\x64\\bin garbage -set "PATH=%PATH:ostedtoolcache=%" - -mkdir "%CONDA%\\etc\\conda\\activate.d" - -echo set "CONDA_BLD_PATH=%CONDA_BLD_PATH%" > "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" -echo set "CPU_COUNT=%CPU_COUNT%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" -echo set "PYTHONUNBUFFERED=%PYTHONUNBUFFERED%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" -echo set "PATH=%PATH%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" - -conda info -conda config --show-sources -conda list --show-channel-urls -""") - -azure_win_script = literal_unicode("""\ -setlocal EnableExtensions EnableDelayedExpansion -call %CONDA%\\condabin\\conda_hook.bat -call %CONDA%\\condabin\\conda.bat activate base - -set "FEEDSTOCK_ROOT=%cd%" - -call conda config --append channels defaults -call conda config --add channels conda-forge -call conda config --add channels robostack -call conda config --set channel_priority strict - -:: conda remove --force m2-git - -C:\\Miniconda\\python.exe -m pip install git+https://github.com/mamba-org/boa.git@master -if errorlevel 1 exit 1 - -for %%X in (%CURRENT_RECIPES%) do ( - echo "BUILDING RECIPE %%X" - cd %FEEDSTOCK_ROOT%\\recipes\\%%X\\ - copy %FEEDSTOCK_ROOT%\\conda_build_config.yaml .\\conda_build_config.yaml - boa build . - if errorlevel 1 exit 1 -) - -anaconda -t %ANACONDA_API_TOKEN% upload "C:\\bld\\win-64\\*.tar.bz2" --force -if errorlevel 1 exit 1 -""") +import requests parsed_args = None def parse_command_line(argv): parser = argparse.ArgumentParser( - description="Conda recipe Azure pipeline generator for ROS packages" + description="Conda recipe Dot graphic generator for ROS packages" ) default_dir = "./recipes" @@ -173,52 +46,6 @@ def parse_command_line(argv): parsed_args = arguments return arguments - -def normalize_name(s): - s = s.replace("-", "_") - return re.sub("[^a-zA-Z0-9_]+", "", s) - - -def batch_stages(stages, max_batch_size=5): - with open("vinca.yaml", "r") as vinca_yaml: - vinca_conf = yaml.safe_load(vinca_yaml) - - # this reduces the number of individual builds to try to save some time - stage_lengths = [len(s) for s in stages] - merged_stages = [] - curr_stage = [] - build_individually = vinca_conf.get("build_in_own_azure_stage", []) - - def chunks(lst, n): - """Yield successive n-sized chunks from lst.""" - for i in range(0, len(lst), n): - yield lst[i:i + n] - i = 0 - while i < len(stages): - for build_individually_pkg in build_individually: - if build_individually_pkg in stages[i]: - merged_stages.append([[build_individually_pkg]]) - stages[i].remove(build_individually_pkg) - - if stage_lengths[i] < max_batch_size and len(curr_stage) + stage_lengths[i] < max_batch_size: - # merge with previous stage - curr_stage += stages[i] - else: - if len(curr_stage): - merged_stages.append([curr_stage]) - curr_stage = [] - if stage_lengths[i] < max_batch_size: - curr_stage += stages[i] - else: - # split this stage into multiple - merged_stages.append(list(chunks(stages[i], max_batch_size))) - i += 1 - if len(curr_stage): - merged_stages.append([curr_stage]) - return merged_stages - -import requests - def 
get_skip_existing(vinca_conf, platform): fn = vinca_conf.get("skip_existing") repodatas = [] From 325a59b17f2f79370a303ac2f4572ddbb423ef0a Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Fri, 23 Jul 2021 18:38:49 +0200 Subject: [PATCH 7/7] automatically get distro --- vinca/migrate.py | 39 +++++++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/vinca/migrate.py b/vinca/migrate.py index 5aa18c6..1d48c06 100644 --- a/vinca/migrate.py +++ b/vinca/migrate.py @@ -6,14 +6,23 @@ import networkx as nx import subprocess import shutil +from vinca.main import read_vinca_yaml +import ruamel.yaml + from vinca.distro import Distro -packages_to_migrate = ["libopencv"] -distro_version = "noetic" -ros_prefix = f"ros-{distro_version}" +distro_version = None +ros_prefix = None -arches = ["linux-64", "linux-aarch64", "win-64", "osx-64", "osx-arm64"] +# arches = ["linux-64", "linux-aarch64", "win-64", "osx-64", "osx-arm64"] +# arch_to_fname = { +# "linux-64": "linux", +# "linux-aarch64": "linux_aarch_64", +# "win-64": "win", +# "osx-64": "osx", +# "osx-arm64": "osx_arm64" +# } def to_ros_name(distro, pkg_name): shortname = pkg_name[len(ros_prefix) + 1:] @@ -26,6 +35,15 @@ def to_ros_name(distro, pkg_name): def create_migration_instructions(arch, packages_to_migrate, trigger_branch): url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json" + + yaml = ruamel.yaml.YAML() + with open("vinca.yaml", "r") as fi: + vinca_conf = yaml.load(fi) + + global distro_version, ros_prefix + distro_version = vinca_conf['ros_distro'] + ros_prefix = f"ros-{distro_version}" + print("URL: ", url) # return repodata = requests.get(url).json() @@ -46,9 +64,6 @@ def create_migration_instructions(arch, packages_to_migrate, trigger_branch): # print(f"need to migrate {pkey}") to_migrate.add(pname) - # print(to_migrate) - # print(ros_pkgs) - latest = {} for pkg in ros_pkgs: current = current_version = None @@ -70,9 +85,7 @@ def create_migration_instructions(arch, packages_to_migrate, trigger_branch): current = pkey latest[pkg] = current - # print(latest) - - # now we can build the graph ... + # now we can build the graph ... G = nx.DiGraph() for pkg, pkgkey in latest.items(): @@ -98,12 +111,6 @@ def create_migration_instructions(arch, packages_to_migrate, trigger_branch): ros_names.append(to_ros_name(distro, pkg)) print("Final names: ", ros_names) - from vinca.main import read_vinca_yaml - import ruamel.yaml - yaml = ruamel.yaml.YAML() - with open("vinca.yaml", "r") as fi: - vinca_conf = yaml.load(fi) - vinca_conf["packages_select_by_deps"] = ros_names vinca_conf["skip_all_deps"] = True vinca_conf["is_migration"] = True