diff --git a/setup.cfg b/setup.cfg
index 7a63591..7a0a646 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -39,6 +39,8 @@ console_scripts =
     vinca = vinca.main:main
     vinca-glab = vinca.generate_gitlab:main
     vinca-azure = vinca.generate_azure:main
+    vinca-dot = vinca.generate_dot:main
+    vinca-migrate = vinca.migrate:main
 
 [flake8]
 import-order-style = google
diff --git a/vinca/generate_azure.py b/vinca/generate_azure.py
index e4c5511..ec20af1 100644
--- a/vinca/generate_azure.py
+++ b/vinca/generate_azure.py
@@ -7,6 +7,7 @@ import argparse
 from distutils.dir_util import copy_tree
 
 import yaml
+import requests
 
 
 class folded_unicode(str):
@@ -142,40 +143,6 @@ def literal_unicode_representer(dumper, data):
 parsed_args = None
 
 
-def parse_command_line(argv):
-    parser = argparse.ArgumentParser(
-        description="Conda recipe Azure pipeline generator for ROS packages"
-    )
-
-    default_dir = "./recipes"
-    parser.add_argument(
-        "-d",
-        "--dir",
-        dest="dir",
-        default=default_dir,
-        help="The recipes directory to process (default: {}).".format(default_dir),
-    )
-
-    parser.add_argument(
-        "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure"
-    )
-
-    parser.add_argument(
-        "-p",
-        "--platform",
-        dest="platform",
-        default="linux-64",
-        help="Platform to emit build pipeline for",
-    )
-
-    parser.add_argument(
-        "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?")
-
-    arguments = parser.parse_args(argv[1:])
-    global parsed_args
-    parsed_args = arguments
-    return arguments
-
 
 def normalize_name(s):
     s = s.replace("-", "_")
@@ -196,6 +163,7 @@ def chunks(lst, n):
         """Yield successive n-sized chunks from lst."""
         for i in range(0, len(lst), n):
             yield lst[i:i + n]
+
     i = 0
     while i < len(stages):
         for build_individually_pkg in build_individually:
@@ -246,8 +214,8 @@ def get_skip_existing(vinca_conf, platform):
     return repodatas
 
 
-def add_additional_recipes(args):
-    additional_recipes_path = os.path.abspath(os.path.join(args.dir, '..', 'additional_recipes'))
+def add_additional_recipes(recipe_dir, platform):
+    additional_recipes_path = os.path.abspath(os.path.join(recipe_dir, '..', 'additional_recipes'))
 
     print("Searching additional recipes in ", additional_recipes_path)
 
@@ -257,7 +225,10 @@ def add_additional_recipes(args):
     with open("vinca.yaml", "r") as vinca_yaml:
         vinca_conf = yaml.safe_load(vinca_yaml)
 
-    repodatas = get_skip_existing(vinca_conf, args.platform)
+    if vinca_conf.get("is_migration"):
+        return
+
+    repodatas = get_skip_existing(vinca_conf, platform)
 
     for recipe_path in glob.glob(additional_recipes_path + '/**/recipe.yaml'):
         with open(recipe_path) as recipe:
@@ -275,97 +246,11 @@ def add_additional_recipes(args):
 
         if not skip:
             print("Adding ", os.path.dirname(recipe_path))
-            goal_folder = os.path.join(args.dir, name)
+            goal_folder = os.path.join(recipe_dir, name)
             os.makedirs(goal_folder, exist_ok=True)
             copy_tree(os.path.dirname(recipe_path), goal_folder)
 
-
-def main():
-
-    args = parse_command_line(sys.argv)
-
-    metas = []
-
-    if args.additional_recipes:
-        add_additional_recipes(args)
-
-    if not os.path.exists(args.dir):
-        print(f"{args.dir} not found. Not generating a pipeline.")
-
-    all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml"))
-    for f in all_recipes:
-        with open(f) as fi:
-            metas.append(yaml.safe_load(fi.read()))
-
-    if len(metas) >= 1:
-        requirements = {}
-
-        for pkg in metas:
-            requirements[pkg["package"]["name"]] = (
-                pkg["requirements"].get("host", []) + pkg["requirements"].get("run", [])
-            )
-
-        # sort out requirements that are not built in this run
-        for pkg_name, reqs in requirements.items():
-            requirements[pkg_name] = [
-                r.split()[0] for r in reqs if (isinstance(r, str) and r in reqs)
-            ]
-        print(requirements)
-
-        G = nx.DiGraph()
-        for pkg, reqs in requirements.items():
-            G.add_node(pkg)
-            for r in reqs:
-                if r.startswith("ros-"):
-                    G.add_edge(pkg, r)
-
-        # import matplotlib.pyplot as plt
-        # nx.draw(G, with_labels=True, font_weight='bold')
-        # plt.show()
-
-        tg = list(reversed(list(nx.topological_sort(G))))
-
-        stages = []
-        current_stage = []
-        for pkg in tg:
-            reqs = requirements.get(pkg, [])
-            sort_in_stage = 0
-            for r in reqs:
-                # sort up the stages, until first stage found where all requirements are fulfilled.
-                for sidx, stage in enumerate(stages):
-                    if r in stages[sidx]:
-                        sort_in_stage = max(sidx + 1, sort_in_stage)
-
-                # if r in current_stage:
-                #     stages.append(current_stage)
-                #     current_stage = []
-            if sort_in_stage >= len(stages):
-                stages.append([pkg])
-            else:
-                stages[sort_in_stage].append(pkg)
-            # current_stage.append(pkg)
-
-        if len(current_stage):
-            stages.append(current_stage)
-    elif len(metas) == 1:
-        fn_wo_yaml = os.path.splitext(os.path.basename(all_recipes[0]))[0]
-        stages = [[fn_wo_yaml]]
-        requirements = [fn_wo_yaml]
-    else:
-        stages = []
-        requirements = []
-
-
-    # filter out packages that we are not actually building
-    filtered_stages = []
-    for stage in stages:
-        filtered = [pkg for pkg in stage if pkg in requirements]
-        if len(filtered):
-            filtered_stages.append(filtered)
-
-    stages = batch_stages(filtered_stages)
-    print(stages)
-
+
+def build_linux(stages, trigger_branch):
     # Build Linux pipeline
     azure_template = {"pool": {"vmImage": "ubuntu-latest"}}
@@ -400,20 +285,26 @@ def main():
         # all packages skipped ...
         azure_stages.append(stage)
 
-    azure_template["trigger"] = [args.trigger_branch]
+    azure_template["trigger"] = [trigger_branch]
     azure_template["pr"] = "none"
     if azure_stages:
         azure_template["stages"] = azure_stages
 
-    if args.platform == "linux-64" and len(azure_stages):
+    if len(azure_stages):
         with open("linux.yml", "w") as fo:
             fo.write(yaml.dump(azure_template, sort_keys=False))
 
-    # Build OSX pipeline
-    azure_template = {"pool": {"vmImage": "macOS-10.15"}}
+
+def build_win(stages, trigger_branch):
+    # windows
+    azure_template = {"pool": {"vmImage": "vs2017-win2016"}}
 
     azure_stages = []
 
+    global azure_win_script
+    if os.path.exists(".scripts/build_win.bat"):
+        with open(".scripts/build_win.bat", "r") as fi:
+            azure_win_script = literal_unicode(fi.read())
+
     stage_names = []
     for i, s in enumerate(stages):
         stage_name = f"stage_{i}"
@@ -425,15 +316,29 @@ def main():
         stage["jobs"].append(
             {
                 "job": f"stage_{i}_job_{len(stage['jobs'])}",
+                "variables": {"CONDA_BLD_PATH": "C:\\\\bld\\\\"},
                 "steps": [
                     {
-                        "script": azure_osx_script,
+                        "powershell": 'Write-Host "##vso[task.prependpath]$env:CONDA\\Scripts"',
+                        "displayName": "Add conda to PATH"
+                    },
+                    {
+                        "script": 'conda install -c conda-forge --yes --quiet conda-build pip mamba ruamel.yaml anaconda-client',
+                        "displayName": "Install conda-build, boa and activate environment"
+                    },
+                    {
+                        "script": azure_win_preconfig_script,
+                        "displayName": "conda-forge build setup",
+                    },
+                    {
+                        "script": azure_win_script,
                         "env": {
                             "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)",
-                            "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}"
+                            "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}",
+                            "PYTHONUNBUFFERED": 1,
                         },
                         "displayName": f"Build {' '.join([pkg for pkg in batch])}",
-                    }
+                    },
                 ],
             }
         )
@@ -442,16 +347,17 @@ def main():
         # all packages skipped ...
         azure_stages.append(stage)
 
-    azure_template["trigger"] = [args.trigger_branch]
+    azure_template["trigger"] = [trigger_branch]
     azure_template["pr"] = "none"
     if azure_stages:
         azure_template["stages"] = azure_stages
 
-    if args.platform == "osx-64" and len(azure_stages):
-        with open("osx.yml", "w") as fo:
+    if len(azure_stages):
+        with open("win.yml", "w") as fo:
             fo.write(yaml.dump(azure_template, sort_keys=False))
 
-    # Build OSX-arm64 pipeline
+
+def build_osx(stages, trigger_branch):
+    # Build OSX pipeline
     azure_template = {"pool": {"vmImage": "macOS-10.15"}}
 
     azure_stages = []
@@ -469,7 +375,7 @@ def main():
                 "job": f"stage_{i}_job_{len(stage['jobs'])}",
                 "steps": [
                     {
-                        "script": azure_osx_arm64_script,
+                        "script": azure_osx_script,
                         "env": {
                             "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)",
                             "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}"
@@ -484,15 +390,16 @@ def main():
         # all packages skipped ...
         azure_stages.append(stage)
 
-    azure_template["trigger"] = [args.trigger_branch]
+    azure_template["trigger"] = [trigger_branch]
     azure_template["pr"] = "none"
     if azure_stages:
         azure_template["stages"] = azure_stages
 
-    if args.platform == "osx-arm64" and len(azure_stages):
-        with open("osx_arm64.yml", "w") as fo:
+    if len(azure_stages):
+        with open("osx.yml", "w") as fo:
             fo.write(yaml.dump(azure_template, sort_keys=False))
 
+
+def build_linux_aarch64(stages, trigger_branch):
     # Build aarch64 pipeline
     azure_template = {
         "pool": {
@@ -532,25 +439,20 @@ def main():
         # all packages skipped ...
         azure_stages.append(stage)
 
-    azure_template["trigger"] = [args.trigger_branch]
+    azure_template["trigger"] = [trigger_branch]
     azure_template["pr"] = "none"
     if azure_stages:
         azure_template["stages"] = azure_stages
 
-    if args.platform == "linux-aarch64" and len(azure_stages):
+    if len(azure_stages):
         with open("linux_aarch64.yml", "w") as fo:
             fo.write(yaml.dump(azure_template, sort_keys=False))
 
-    # windows
-    azure_template = {"pool": {"vmImage": "windows-2019"}}
-
+
+def build_osx_arm64(stages, trigger_branch):
+    # Build OSX-arm64 pipeline
+    azure_template = {"pool": {"vmImage": "macOS-10.15"}}
     azure_stages = []
 
-    global azure_win_script
-    if os.path.exists(".scripts/build_win.bat"):
-        with open(".scripts/build_win.bat", "r") as fi:
-            azure_win_script = literal_unicode(fi.read())
-
     stage_names = []
     for i, s in enumerate(stages):
         stage_name = f"stage_{i}"
@@ -562,29 +464,15 @@ def main():
         stage["jobs"].append(
             {
                 "job": f"stage_{i}_job_{len(stage['jobs'])}",
-                "variables": {"CONDA_BLD_PATH": "C:\\\\bld\\\\"},
                 "steps": [
                     {
-                        "powershell": 'Write-Host "##vso[task.prependpath]$env:CONDA\\Scripts"',
-                        "displayName": "Add conda to PATH"
-                    },
-                    {
-                        "script": 'conda install -c conda-forge --yes --quiet conda-build pip mamba ruamel.yaml anaconda-client',
-                        "displayName": "Install conda-build, boa and activate environment"
-                    },
-                    {
-                        "script": azure_win_preconfig_script,
-                        "displayName": "conda-forge build setup",
-                    },
-                    {
-                        "script": azure_win_script,
+                        "script": azure_osx_arm64_script,
                         "env": {
                             "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)",
-                            "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}",
-                            "PYTHONUNBUFFERED": 1,
+                            "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}"
                         },
                         "displayName": f"Build {' '.join([pkg for pkg in batch])}",
-                    },
+                    }
                 ],
             }
         )
@@ -593,11 +481,209 @@ def main():
         # all packages skipped ...
         azure_stages.append(stage)
 
-    azure_template["trigger"] = [args.trigger_branch]
+    azure_template["trigger"] = [trigger_branch]
     azure_template["pr"] = "none"
     if azure_stages:
         azure_template["stages"] = azure_stages
 
-    if args.platform.startswith("win") and len(azure_stages):
-        with open("win.yml", "w") as fo:
+    if len(azure_stages):
+        with open("osx_arm64.yml", "w") as fo:
             fo.write(yaml.dump(azure_template, sort_keys=False))
+
+
+def extend_graph(graph, arch='linux-64', distro='noetic'):
+    url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json"
+    repodata = requests.get(url).json()
+
+    latest = {}
+    ros_pkgs = set()
+    ros_prefix = f"ros-{distro}"
+    packages = repodata.get("packages", {})
+
+    for pkey in packages:
+        if not pkey.startswith(ros_prefix):
+            continue
+
+        pname = pkey.rsplit('-', 2)[0]
+        ros_pkgs.add(pname)
+
+    for pkg in ros_pkgs:
+        current = current_version = None
+        for pkey in packages:
+            if packages[pkey]["name"] == pkg:
+                tmp = packages[pkey]["version"].split('.')
+                version = []
+                for el in tmp:
+                    if el.isdecimal():
+                        version.append(int(el))
+                    else:
+                        x = re.search(r'[^0-9]', el).start()
+                        version.append(int(el[:x]))
+
+                version = tuple(version)
+
+                if not current or version > current_version:
+                    current_version = version
+                    current = pkey
+
+        latest[pkg] = current
+
+    for pkg, pkgkey in latest.items():
+        full_pkg = packages[pkgkey]
+        for dep in full_pkg.get("depends", []):
+            req = dep.split(' ')[0]
+            graph.add_node(pkg)
+            if req.startswith(ros_prefix):
+                graph.add_edge(pkg, req)
+
+
+def generate_pipeline(recipe_dir, platform, trigger_branch, sequential=False):
+    metas = []
+    all_recipes = glob.glob(os.path.join(recipe_dir, "**", "*.yaml"))
+    for f in all_recipes:
+        with open(f) as fi:
+            metas.append(yaml.safe_load(fi.read()))
+
+    if len(metas) >= 1:
+        requirements = {}
+
+        for pkg in metas:
+            requirements[pkg["package"]["name"]] = (
+                pkg["requirements"].get("host", []) + pkg["requirements"].get("run", [])
+            )
+
+        G = nx.DiGraph()
+        for pkg, reqs in requirements.items():
+            G.add_node(pkg)
+            for r in reqs:
+                if not isinstance(r, str):
+                    continue
+                if r.startswith("ros-"):
+                    G.add_edge(pkg, r)
+
+        extend_graph(G, arch=platform)
+        # import matplotlib.pyplot as plt
+        # nx.draw(G, with_labels=True, font_weight='bold')
+        # plt.show()
+
+        tg = list(reversed(list(nx.topological_sort(G))))
+        print("Fully sorted graph: ", tg)
+
+        recipes = list(requirements.keys())
+        tg = sorted(recipes, key=lambda x: tg.index(x))
+        print("SUBGRAPH OF INTEREST: ", tg)
+
+        # # sort out requirements that are not built in this run
+        # for pkg_name in tg:
+        #     requirements[pkg_name] = [
+        #         r.split()[0] for r in reqs if (isinstance(r, str) and r in tg)
+        #     ]
+
+        stages = []
+        current_stage = []
+        for pkg in tg:
+            reqs = requirements.get(pkg, [])
+            sort_in_stage = 0
+            for r in reqs:
+                # sort up the stages, until first stage found where all requirements are fulfilled.
+                for sidx, stage in enumerate(stages):
+                    if r in stages[sidx]:
+                        sort_in_stage = max(sidx + 1, sort_in_stage)
+
+                # if r in current_stage:
+                #     stages.append(current_stage)
+                #     current_stage = []
+            if sort_in_stage >= len(stages):
+                stages.append([pkg])
+            else:
+                stages[sort_in_stage].append(pkg)
+            # current_stage.append(pkg)
+
+        if len(current_stage):
+            stages.append(current_stage)
+    elif len(metas) == 1:
+        fn_wo_yaml = os.path.splitext(os.path.basename(all_recipes[0]))[0]
+        stages = [[fn_wo_yaml]]
+        requirements = [fn_wo_yaml]
+    else:
+        stages = []
+        requirements = []
+
+    # filter out packages that we are not actually building
+    filtered_stages = []
+    for stage in stages:
+        filtered = [pkg for pkg in stage if pkg in requirements]
+        if len(filtered):
+            filtered_stages.append(filtered)
+
+    if sequential:
+        single_stage = []
+        for s in filtered_stages:
+            single_stage.extend(s)
+        stages = [[single_stage]]
+    else:
+        stages = batch_stages(filtered_stages)
+
+    if platform == "linux-64":
+        build_linux(stages, trigger_branch)
+    elif platform == "linux-aarch64":
+        build_linux_aarch64(stages, trigger_branch)
+    elif platform == "osx-64":
+        build_osx(stages, trigger_branch)
+    elif platform == "osx-arm64":
+        build_osx_arm64(stages, trigger_branch)
+    elif platform == "win-64":
+        build_win(stages, trigger_branch)
+
+
+def parse_command_line(argv):
+    parser = argparse.ArgumentParser(
+        description="Conda recipe Azure pipeline generator for ROS packages"
+    )
+
+    default_dir = "./recipes"
+    parser.add_argument(
+        "-d",
+        "--dir",
+        dest="dir",
+        default=default_dir,
+        help="The recipes directory to process (default: {}).".format(default_dir),
+    )
+    parser.add_argument(
+        "--sequential",
+        dest="sequential",
+        action="store_true",
+        help="Don't parallelize stages",
+    )
+    parser.add_argument(
+        "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure"
+    )
+
+    parser.add_argument(
+        "-p",
+        "--platform",
+        dest="platform",
+        default="linux-64",
+        help="Platform to emit build pipeline for",
+    )
+
+    parser.add_argument(
+        "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?")
+
+    arguments = parser.parse_args(argv[1:])
+    global parsed_args
+    parsed_args = arguments
+    return arguments
+
+
+def main():
+
+    args = parse_command_line(sys.argv)
+
+    if not os.path.exists(args.dir):
+        print(f"{args.dir} not found. Not generating a pipeline.")
+        return
+
+    if args.additional_recipes:
+        add_additional_recipes(args.dir, args.platform)
+
+    generate_pipeline(args.dir, args.platform, args.trigger_branch, args.sequential)
\ No newline at end of file
diff --git a/vinca/generate_dot.py b/vinca/generate_dot.py
new file mode 100644
index 0000000..a48e605
--- /dev/null
+++ b/vinca/generate_dot.py
@@ -0,0 +1,176 @@
+import networkx as nx
+import yaml
+import re
+import glob
+import sys, os
+import textwrap
+import argparse
+from distutils.dir_util import copy_tree
+import requests
+
+parsed_args = None
+
+
+def parse_command_line(argv):
+    parser = argparse.ArgumentParser(
+        description="Conda recipe Dot graphic generator for ROS packages"
+    )
+
+    default_dir = "./recipes"
+    parser.add_argument(
+        "-d",
+        "--dir",
+        dest="dir",
+        default=default_dir,
+        help="The recipes directory to process (default: {}).".format(default_dir),
+    )
+
+    parser.add_argument(
+        "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure"
+    )
+
+    parser.add_argument(
+        "-p",
+        "--platform",
+        dest="platform",
+        default="linux-64",
+        help="Platform to emit build pipeline for",
+    )
+
+    parser.add_argument(
+        "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?")
+
+    arguments = parser.parse_args(argv[1:])
+    global parsed_args
+    parsed_args = arguments
+    return arguments
+
+
+def get_skip_existing(vinca_conf, platform):
+    fn = vinca_conf.get("skip_existing")
+    repodatas = []
+    if fn is not None:
+        fns = list(fn)
+    else:
+        fns = []
+    for fn in fns:
+        selected_bn = None
+        if "://" in fn:
+            fn += f"{platform}/repodata.json"
+            print(f"Fetching repodata: {fn}")
+            request = requests.get(fn)
+
+            repodata = request.json()
+            repodatas.append(repodata)
+        else:
+            import json
+            with open(fn) as fi:
+                repodata = json.load(fi)
+            repodatas.append(repodata)
+
+    return repodatas
+
+
+def add_additional_recipes(args):
+    additional_recipes_path = os.path.abspath(os.path.join(args.dir, '..', 'additional_recipes'))
+
+    print("Searching additional recipes in ", additional_recipes_path)
+
+    if not os.path.exists(additional_recipes_path):
+        return
+
+    with open("vinca.yaml", "r") as vinca_yaml:
+        vinca_conf = yaml.safe_load(vinca_yaml)
+
+    repodatas = get_skip_existing(vinca_conf, args.platform)
+
+    for recipe_path in glob.glob(additional_recipes_path + '/**/recipe.yaml'):
+        with open(recipe_path) as recipe:
+            additional_recipe = yaml.safe_load(recipe)
+
+        name, version, bnumber = (additional_recipe["package"]["name"], additional_recipe["package"]["version"], additional_recipe["build"]["number"])
+        print("Checking if ", name, version, bnumber, " exists")
+        skip = False
+        for repo in repodatas:
+            for key, pkg in repo.get("packages", {}).items():
+                if pkg["name"] == name and pkg["version"] == version and pkg["build_number"] == bnumber:
+                    skip = True
+                    print(f"{name}=={version}=={bnumber} already exists. Skipping.")
+                    break
+
+        if not skip:
+            print("Adding ", os.path.dirname(recipe_path))
+            goal_folder = os.path.join(args.dir, name)
+            os.makedirs(goal_folder, exist_ok=True)
+            copy_tree(os.path.dirname(recipe_path), goal_folder)
+
+
+def main():
+
+    args = parse_command_line(sys.argv)
+
+    metas = []
+
+    if args.additional_recipes:
+        add_additional_recipes(args)
+
+    if not os.path.exists(args.dir):
+        print(f"{args.dir} not found. Not generating a pipeline.")
+
+    all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml"))
+    for f in all_recipes:
+        with open(f) as fi:
+            metas.append(yaml.safe_load(fi.read()))
+
+    if len(metas) >= 1:
+        requirements = {}
+
+        for pkg in metas:
+            requirements[pkg["package"]["name"]] = (
+                pkg["requirements"].get("host", []) + pkg["requirements"].get("run", [])
+            )
+
+        # sort out requirements that are not built in this run
+        for pkg_name, reqs in requirements.items():
+            requirements[pkg_name] = [
+                r.split()[0] for r in reqs if (isinstance(r, str) and r in reqs)
+            ]
+        print(requirements)
+
+        G = nx.DiGraph()
+        for pkg, reqs in requirements.items():
+            G.add_node(pkg)
+            for r in reqs:
+                if r.startswith("ros-"):
+                    G.add_edge(pkg, r)
+
+        import matplotlib.pyplot as plt
+        from networkx.drawing.nx_agraph import write_dot
+
+        nx.draw(G, with_labels=True, font_weight='bold')
+        plt.show()
+
+        write_dot(G, "grid.dot")
+
+        tg = list(reversed(list(nx.topological_sort(G))))
+
+        stages = []
+        current_stage = []
+        for pkg in tg:
+            reqs = requirements.get(pkg, [])
+            sort_in_stage = 0
+            for r in reqs:
+                # sort up the stages, until first stage found where all requirements are fulfilled.
+                for sidx, stage in enumerate(stages):
+                    if r in stages[sidx]:
+                        sort_in_stage = max(sidx + 1, sort_in_stage)
+
+                # if r in current_stage:
+                #     stages.append(current_stage)
+                #     current_stage = []
+            if sort_in_stage >= len(stages):
+                stages.append([pkg])
+            else:
+                stages[sort_in_stage].append(pkg)
+            # current_stage.append(pkg)
+
+        if len(current_stage):
+            stages.append(current_stage)
diff --git a/vinca/main.py b/vinca/main.py
index d1ed4aa..d8607de 100644
--- a/vinca/main.py
+++ b/vinca/main.py
@@ -87,6 +87,13 @@ def parse_command_line(argv):
         default=default_dir,
         help="The directory to process (default: {}).".format(default_dir),
     )
+    parser.add_argument(
+        "-f",
+        "--file",
+        dest="file",
+        default="vinca.yaml",
+        help="The vinca file to process (default: vinca.yaml)",
+    )
     parser.add_argument(
         "-s",
         "--skip",
@@ -215,7 +222,8 @@ def generate_output(pkg_shortname, vinca_conf, distro, version, all_pkgs=[]):
         "{{ compiler('c') }}",
         "ninja",
         {"sel(unix)": "make"},
-        {"sel(osx)": "tapi"},
+        # let's figure out if we need this, was added for ROS2
+        # {"sel(osx)": "tapi"},
         "cmake",
         {"sel(build_platform != target_platform)": "python"},
         {"sel(build_platform != target_platform)": "cross-python_{{ target_platform }}"},
@@ -738,7 +746,7 @@ def main():
     arguments = parse_command_line(sys.argv)
 
     base_dir = os.path.abspath(arguments.dir)
-    vinca_yaml = os.path.join(base_dir, "vinca.yaml")
+    vinca_yaml = os.path.join(base_dir, arguments.file)
     vinca_conf = read_vinca_yaml(vinca_yaml)
 
     vinca_conf["_conda_indexes"] = get_conda_index(vinca_conf, base_dir)
diff --git a/vinca/migrate.py b/vinca/migrate.py
new file mode 100644
index 0000000..1d48c06
--- /dev/null
+++ b/vinca/migrate.py
@@ -0,0 +1,170 @@
+import yaml
+import sys, os
+import re
+import glob
+import argparse
+import requests
+import networkx as nx
+import subprocess
+import shutil
+from vinca.main import read_vinca_yaml
+import ruamel.yaml
+
+
+from vinca.distro import Distro
+
+distro_version = None
+ros_prefix = None
+
+# arches = ["linux-64", "linux-aarch64", "win-64", "osx-64", "osx-arm64"]
+# arch_to_fname = {
+#     "linux-64": "linux",
+#     "linux-aarch64": "linux_aarch_64",
+#     "win-64": "win",
+#     "osx-64": "osx",
+#     "osx-arm64": "osx_arm64"
+# }
+
+
+def to_ros_name(distro, pkg_name):
+    shortname = pkg_name[len(ros_prefix) + 1:]
+    if distro.check_package(shortname):
+        return shortname
+    elif distro.check_package(shortname.replace('-', '_')):
+        return shortname.replace('-', '_')
+    else:
+        raise RuntimeError(f"Couldn't convert {pkg_name} to a ROS package name")
+
+
+def create_migration_instructions(arch, packages_to_migrate, trigger_branch):
+    url = f"https://conda.anaconda.org/robostack/{arch}/repodata.json"
+
+    yaml = ruamel.yaml.YAML()
+    with open("vinca.yaml", "r") as fi:
+        vinca_conf = yaml.load(fi)
+
+    global distro_version, ros_prefix
+    distro_version = vinca_conf['ros_distro']
+    ros_prefix = f"ros-{distro_version}"
+
+    print("URL: ", url)
+    # return
+    repodata = requests.get(url).json()
+    packages = repodata["packages"]
+    to_migrate = set()
+    ros_pkgs = set()
+    for pkey in packages:
+        if not pkey.startswith(ros_prefix):
+            continue
+
+        pname = pkey.rsplit('-', 2)[0]
+        ros_pkgs.add(pname)
+
+        p = packages[pkey]
+
+        for d in p.get("depends", []):
+            if d.split()[0] in packages_to_migrate:
+                # print(f"need to migrate {pkey}")
+                to_migrate.add(pname)
+
+    latest = {}
+    for pkg in ros_pkgs:
+        current = current_version = None
+        for pkey in packages:
+            if packages[pkey]["name"] == pkg:
+                tmp = packages[pkey]["version"].split('.')
+                version = []
+                for el in tmp:
+                    if el.isdecimal():
+                        version.append(int(el))
+                    else:
+                        x = re.search(r'[^0-9]', el).start()
+                        version.append(int(el[:x]))
+
+                version = tuple(version)
+
+                if not current or version > current_version:
+                    current_version = version
+                    current = pkey
+        latest[pkg] = current
+
+    # now we can build the graph ...
+
+    G = nx.DiGraph()
+    for pkg, pkgkey in latest.items():
+        full_pkg = packages[pkgkey]
+        for dep in full_pkg.get("depends", []):
+            req = dep.split(' ')[0]
+            G.add_node(pkg)
+            if req.startswith(ros_prefix):
+                G.add_edge(pkg, req)
+
+    gsorted = nx.topological_sort(G)
+    gsorted = list(reversed([g for g in gsorted]))
+
+    to_migrate = sorted(to_migrate, key=lambda x: gsorted.index(x))
+
+    print("Sorted to migrate: ", to_migrate)
+
+    distro = Distro(distro_version)
+    # import IPython; IPython.embed()
+
+    ros_names = []
+    for pkg in to_migrate:
+        ros_names.append(to_ros_name(distro, pkg))
+    print("Final names: ", ros_names)
+
+    vinca_conf["packages_select_by_deps"] = ros_names
+    vinca_conf["skip_all_deps"] = True
+    vinca_conf["is_migration"] = True
+
+    with open("vinca.yaml", "w") as fo:
+        yaml.dump(vinca_conf, fo)
+
+    if os.path.exists("recipes"):
+        shutil.rmtree("recipes")
+
+    subprocess.check_call(["vinca", "-f", "vinca.yaml", "--multiple", "--platform", arch])
+    subprocess.check_call(["vinca-azure", "--platform", arch, "--trigger-branch", "buildbranch_linux", "-d", "./recipes", "--additional-recipes", "--sequential"])
+
+
+def parse_command_line(argv):
+    parser = argparse.ArgumentParser(
+        description="Conda recipe Azure pipeline generator for ROS packages"
+    )
+
+    default_dir = "./recipes"
+    parser.add_argument(
+        "-d",
+        "--dir",
+        dest="dir",
+        default=default_dir,
+        help="The recipes directory to process (default: {}).".format(default_dir),
+    )
+
+    parser.add_argument(
+        "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure"
+    )
+
+    parser.add_argument(
+        "-p",
+        "--platform",
+        dest="platform",
+        default="linux-64",
+        help="Platform to emit build pipeline for",
+    )
+
+    parser.add_argument(
+        "-a", "--additional-recipes", action="store_true", help="search for additional_recipes folder?")
+
+    arguments = parser.parse_args(argv[1:])
+    global parsed_args
+    parsed_args = arguments
+    return arguments
+
+
+def main():
+    args = parse_command_line(sys.argv)
+
+    mfile = os.path.join(args.dir, "migration.yaml")
+    with open(mfile, "r") as fi:
+        migration = yaml.safe_load(fi)
+    print(migration)
+    create_migration_instructions(args.platform, migration.get('packages', []), args.trigger_branch)
\ No newline at end of file