diff --git a/README.md b/README.md
index 1b193c1..9ae2f31 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,8 @@ Ipython/Jupyter magic for inline D code in a python notebook
Liable to change. Depends on pyd (https://github.com/ariovistus/pyd), mergedict (https://pypi.python.org/pypi/mergedict) and dub (http://code.dlang.org)
+Example on colab: [examples/colab_example.ipynb](https://colab.research.google.com/github/DlangScience/PydMagic/blob/master/examples/colab_example.ipynb)
+
To install, just enter
```
in [1]: %install_ext https://raw.githubusercontent.com/DlangScience/PydMagic/master/pyd_magic.py
diff --git a/examples/colab_example.ipynb b/examples/colab_example.ipynb
new file mode 100644
index 0000000..8beb519
--- /dev/null
+++ b/examples/colab_example.ipynb
@@ -0,0 +1,508 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "provenance": []
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# PydMagic examples"
+ ],
+ "metadata": {
+ "id": "SAwAlgrO7l_U"
+ }
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Install the ipython extension and dependencies"
+ ],
+ "metadata": {
+ "id": "y7Itd3EK_tcB"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "id": "YAcDsg_xmEPd"
+ },
+ "outputs": [],
+ "source": [
+ "!curl -s https://raw.githubusercontent.com/Reavershark/PydMagic/2023-update-squash/pyd_magic.py > pyd_magic.py\n",
+ "!pip install mergedict pyd >/dev/null\n",
+ "!apt-get update >/dev/null && apt-get install -y ldc dub >/dev/null\n",
+ "%load_ext pyd_magic\n",
+ "!rm pyd_magic.py"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## General type conversion demo"
+ ],
+ "metadata": {
+ "id": "EofK5M_W7fkw"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "%%pyd\n",
+ "\n",
+ "import std;\n",
+ "\n",
+ "@pdef!() auto type_demo() {\n",
+ " return tuple(\n",
+ " \"Hello world!\",\n",
+ " true,\n",
+ " 1,\n",
+ " 5.iota.map!\"a*2\".array,\n",
+ " [\"a\": 1],\n",
+ " (int a) => a + 1\n",
+ " );\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "FkDmUREyvXZE"
+ },
+ "execution_count": 2,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "print(type_demo())\n",
+ "print(type_demo()[-1](1)) # Call the lambda (the last tuple element)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "xEqomDMJ8mPQ",
+ "outputId": "0f60b7c6-07d6-4a4f-b775-2df7d3d25fa2"
+ },
+ "execution_count": 3,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "('Hello world!', True, 1, [0, 2, 4, 6, 8], {'a': 1}, )\n",
+ "2\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ " ## Using custom python classes from D"
+ ],
+ "metadata": {
+ "id": "tPkmOtq1p6r7"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "class SomeClass:\n",
+ " a_field = 1\n",
+ " def test(self) -> int:\n",
+ " return 2"
+ ],
+ "metadata": {
+ "id": "gO4K0vYzs8M-"
+ },
+ "execution_count": 4,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "%%pyd\n",
+ "\n",
+ "@pdef!() auto py_class_demo() {\n",
+ " PydObject py_obj = py_eval(\"SomeClass()\", \"__main__\");\n",
+ " py_obj.method(\"test\"); // return value is discarded\n",
+ " py_obj.a_field = 10;\n",
+ " return py_obj;\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "IKPSoNYwnDKS"
+ },
+ "execution_count": 5,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "some_class = py_class_demo()\n",
+ "print(some_class)\n",
+ "print(some_class.test())\n",
+ "print(some_class.a_field)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "Gr8Sru_ftzEO",
+ "outputId": "52966ed2-8c8f-4ba2-a615-d2ac66c0d260"
+ },
+ "execution_count": 6,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "<__main__.SomeClass object at 0x7f4c603c0df0>\n",
+ "2\n",
+ "10\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Use D ranges in python"
+ ],
+ "metadata": {
+ "id": "9e0DcI9X5cIw"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "%%pyd\n",
+ "import std.range;\n",
+ "\n",
+ "struct FibonacciRange\n",
+ "{\n",
+ " // States of the Fibonacci generator\n",
+ " int a = 1, b = 1;\n",
+ "\n",
+ " // The fibonacci range never ends\n",
+ " enum empty = false;\n",
+ "\n",
+ " // Peek at the first element\n",
+ " int front() const @property\n",
+ " {\n",
+ " return a;\n",
+ " }\n",
+ "\n",
+ " // Remove the first element\n",
+ " void popFront()\n",
+ " {\n",
+ " auto t = a;\n",
+ " a = b;\n",
+ " b = t + b;\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "static assert(isInputRange!FibonacciRange);\n",
+ "static assert(isInfinite!FibonacciRange);\n",
+ "\n",
+ "@pdef!() auto range_demo() {\n",
+ " // Can't return infinite ranges to python\n",
+ " return FibonacciRange().take(20);\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "pRHE8cA15YOp"
+ },
+ "execution_count": 7,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "for el in range_demo():\n",
+ " print(el, end=\" \")"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "miNVvjJq5uDd",
+ "outputId": "6ae9fbc3-c8d9-4dea-b689-7c0ff8e860cd"
+ },
+ "execution_count": 8,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "1 1 2 3 5 8 13 21 34 55 89 144 233 377 610 987 1597 2584 4181 6765 "
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Catching python exceptions in D"
+ ],
+ "metadata": {
+ "id": "Ta7wTmhJqF5S"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "%%pyd\n",
+ "\n",
+ "@pdef!() string py_exception_demo(PydObject some_lambda) {\n",
+ " bool caught_exception = false;\n",
+ " try {\n",
+ " some_lambda();\n",
+ " } catch (PythonException e) {\n",
+ " caught_exception = true;\n",
+ " }\n",
+ " return caught_exception ? \"Caught a PythonException\" : \"Didn't catch anything\";\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "P9hNjJq0qKcR"
+ },
+ "execution_count": 9,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "def just_throw():\n",
+ " raise Exception('error')\n",
+ "\n",
+ "print(py_exception_demo(lambda: None))\n",
+ "print(py_exception_demo(lambda: just_throw()))"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "_fhs2WCyxhYE",
+ "outputId": "2adcd6f3-839f-4635-dc82-ecadf0248abb"
+ },
+ "execution_count": 10,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Didn't catch anything\n",
+ "Caught a PythonException\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Catching D exceptions in python"
+ ],
+ "metadata": {
+ "id": "7_ZLHqfk0T8K"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "%%pyd\n",
+ "\n",
+ "@pdef!() void d_exception_demo() {\n",
+ " throw new Exception(\"message\");\n",
+ "}\n",
+ "\n",
+ "@pdef!() void print_first_n_lines(string msg, int n) {\n",
+ " import std : split, take, join;\n",
+ " auto print = py_eval(\"print\");\n",
+ " \n",
+ " string shortened = msg.split(\"\\n\").take(n).join(\"\\n\");\n",
+ " print(shortened);\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "uYpxcE3F0Xbp"
+ },
+ "execution_count": 11,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Rewritten in d in the previous cell\n",
+ "#def print_first_n_lines(msg: str, n: int) -> None:\n",
+ "# print('\\n'.join(msg.split(\"\\n\")[0:n]))\n",
+ "\n",
+ "try:\n",
+ " d_exception_demo()\n",
+ "except Exception as e:\n",
+ " print(type(e))\n",
+ " print_first_n_lines(str(e), 15)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "MEm46LmvqIoD",
+ "outputId": "7da7b8e3-5429-4200-adea-4b3c39500f16"
+ },
+ "execution_count": 12,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "\n",
+ "D Exception:\n",
+ "object.Exception@/root/.cache/ipython/pyd/_pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4/_pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4.d(14): message\n",
+ "----------------\n",
+ "??:? void _pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4.d_exception_demo() [0x7f4c42ace936]\n",
+ "??:? void pyd.func_wrap.applyPyTupleToAlias!(_pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4.d_exception_demo(), \"d_exception_demo\").applyPyTupleToAlias(deimos.python.object.PyObject*, deimos.python.object.PyObject*) [0x7f4c42ae491d]\n",
+ "??:? deimos.python.object.PyObject* pyd.func_wrap.pyApplyToAlias!(_pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4.d_exception_demo(), \"d_exception_demo\").pyApplyToAlias(deimos.python.object.PyObject*, deimos.python.object.PyObject*) [0x7f4c42ae477c]\n",
+ "??:? deimos.python.object.PyObject* pyd.func_wrap.function_wrap!(_pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4.d_exception_demo(), \"d_exception_demo\").func(deimos.python.object.PyObject*, deimos.python.object.PyObject*, deimos.python.object.PyObject*).__dgliteral4() [0x7f4c42ae4750]\n",
+ "??:? deimos.python.object.PyObject* pyd.exception.exception_catcher!(deimos.python.object.PyObject*).exception_catcher(deimos.python.object.PyObject* delegate()) [0x7f4c42acfd9c]\n",
+ "??:? extern (C) deimos.python.object.PyObject* pyd.func_wrap.function_wrap!(_pyd_magic_0717fb6d456ecdb75f1a8e32f07192f4.d_exception_demo(), \"d_exception_demo\").func(deimos.python.object.PyObject*, deimos.python.object.PyObject*, deimos.python.object.PyObject*) [0x7f4c42ae471b]\n",
+ "??:? [0x53aca8]\n",
+ "??:? _PyObject_MakeTpCall [0x62a4bd]\n",
+ "??:? _PyEval_EvalFrameDefault [0x5afc17]\n",
+ "??:? [0x5a9eb3]\n",
+ "??:? _PyEval_EvalCodeWithName [0x5aad9a]\n",
+ "??:? PyEval_EvalCode [0x6d01b6]\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Parsing formatted strings using std.format + benchmark"
+ ],
+ "metadata": {
+ "id": "uXTykQxndYf7"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "%%pyd --dub_args=\"--build=release-nobounds\"\n",
+ "\n",
+ "import std.typecons;\n",
+ "import std.format;\n",
+ "\n",
+ "@pdef!() Tuple!(int, int) parse_point_d(string s)\n",
+ "{\n",
+ " Tuple!(int, int) t;\n",
+ " s.formattedRead!\"{%d;%d}\"(t[0], t[1]);\n",
+ " return t;\n",
+ "}\n",
+ "\n",
+ "@pdef!() Tuple!(int, int)[] parse_point_list_d(string[] arr)\n",
+ "{\n",
+ " Tuple!(int, int)[] tuples;\n",
+ " tuples.length = arr.length;\n",
+ " \n",
+ " foreach (i, s; arr)\n",
+ " s.formattedRead!\"{%d;%d}\"(tuples[i][0], tuples[i][1]);\n",
+ "\n",
+ " return tuples;\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "lT5IKl3VdX-z"
+ },
+ "execution_count": 13,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "def parse_point_py(s: str) -> tuple:\n",
+ " assert len(s) >= 5\n",
+ " s = s[1:-1]\n",
+ " s_split = s.split(';')\n",
+ " return (int(s_split[0]), int(s_split[1]))\n",
+ "\n",
+ "def parse_point_list_py(l: list) -> list:\n",
+ " return [parse_point_py(x) for x in l]"
+ ],
+ "metadata": {
+ "id": "LpyH4cPVfs9I"
+ },
+ "execution_count": 14,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "points = [\"{0;1}\", \"{2;3}\", \"{-1;0}\"]\n",
+ "\n",
+ "print(parse_point_list_d(points))\n",
+ "print(parse_point_list_py(points))\n",
+ "\n",
+ "from time import time\n",
+ "\n",
+ "points *= 1000\n",
+ "\n",
+ "start = time()\n",
+ "for i in range(10_000):\n",
+ " [parse_point_d(point) for point in points]\n",
+ "end = time()\n",
+ "print(\"parse_point_d:\", end - start)\n",
+ "\n",
+ "start = time()\n",
+ "for i in range(10_000):\n",
+ " [parse_point_py(point) for point in points]\n",
+ "end = time()\n",
+ "print(\"parse_point_py:\", end - start)\n",
+ "\n",
+ "start = time()\n",
+ "for i in range(10_000):\n",
+ " parse_point_list_d(points) # marginally faster\n",
+ "end = time()\n",
+ "print(\"parse_point_list_d:\", end - start)\n",
+ "\n",
+ "start = time()\n",
+ "for i in range(10_000):\n",
+ " parse_point_list_py(points)\n",
+ "end = time()\n",
+ "print(\"parse_point_list_py:\", end - start)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "s80m1DdKhEsP",
+ "outputId": "9edf0388-5e03-4d7a-a4f4-87e2b1dd5020"
+ },
+ "execution_count": 15,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "[(0, 1), (2, 3), (-1, 0)]\n",
+ "[(0, 1), (2, 3), (-1, 0)]\n",
+ "parse_point_d: 24.900002479553223\n",
+ "parse_point_py: 21.717164516448975\n",
+ "parse_point_list_d: 16.407686948776245\n",
+ "parse_point_list_py: 21.314393758773804\n"
+ ]
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pyd_magic.py b/pyd_magic.py
index b96e736..83fa49b 100644
--- a/pyd_magic.py
+++ b/pyd_magic.py
@@ -1,26 +1,20 @@
-from IPython.core import magic_arguments
-from IPython.core.magic import cell_magic, magics_class, Magics
-from IPython.utils.path import get_ipython_cache_dir
-from IPython.utils import py3compat
import sys
-import os
-import io
-import time
-import imp
+if sys.version_info[0] < 3:
+ raise ImportError('Only python 3+ is supported.')
+
+import ast
+import hashlib
+import importlib.util
import json
+import os
import subprocess
-import shutil
-import ast
-
-from mergedict import ConfigDict
+import time
-try:
- import hashlib
-except ImportError:
- import md5 as hashlib
+from IPython.core import magic_arguments
+from IPython.core.magic import cell_magic, magics_class, Magics
+from IPython.paths import get_ipython_cache_dir
-from distutils.core import Distribution
-from distutils.command.build_ext import build_ext
+from mergedict import ConfigDict
import pyd.support
@@ -30,37 +24,22 @@ def __init__(self, shell):
super(PydMagics, self).__init__(shell)
self._reloads = {}
self._code_cache = {}
-
+
def _import_all(self, module):
for k,v in module.__dict__.items():
if not k.startswith('__'):
self.shell.push({k:v})
-
+
@magic_arguments.magic_arguments()
@magic_arguments.argument(
'-n', '--name',
help="Specify a name for the Pyd module."
)
- @magic_arguments.argument(
- '-I', '--include', action='append', default=[],
- help="Add a path to the list of include directories (can be specified "
- "multiple times)."
- )
@magic_arguments.argument(
'-f', '--force', action='store_true', default=False,
help="Force the compilation of a new module, even if the source has been "
"previously compiled."
)
- @magic_arguments.argument(
- '--compiler', action='store', default='dmd',
- help="Specify the D compiler to be used. Default is dmd"
- )
- @magic_arguments.argument(
- '--compiler_type', action='store', default='dmd',
- help="Specify the compiler type, as in dmd, gdc, ldc or sdc. Needed if "
- "you are using a non-standard compiler name e.g. dmd_HEAD for your"
- "own personal build of dmd from git master HEAD"
- )
@magic_arguments.argument(
'--pyd_version', action='store', default='>=0.9.7',
help="Specify the pyd version to use, as a dub version specifier "
@@ -83,136 +62,219 @@ def _import_all(self, module):
help="Print the output from the compilation process, even if compilation "
"runs sucessfully"
)
-
+
@cell_magic
def pyd(self, line, cell):
-
+ # Read all arguments to %%pyd
args = magic_arguments.parse_argstring(self.pyd, line)
- code = 'import ppyd;\n\n\
- extern(C) void PydMain()\n{\n \
- registerAll!(Alias!(__traits(parent, PydMain)))();\n\
- }\n\n'\
- + cell
- code = code if code.endswith('\n') else code+'\n'
-
- key = code, line, sys.version_info, sys.executable
-
- try:
- args.dub_config = json.loads(args.dub_config)
- except:
- args.dub_config = json.loads(ast.literal_eval(args.dub_config))
- pass
-
- try:
- args.dub_args = ast.literal_eval(args.dub_args)
- except:
- pass
-
- if args.force:
- # Force a new module name by adding the current time to the
- # key which is hashed to determine the module name.
- key += (time.time(),)
- args.dub_args = '--force ' + args.dub_args
-
- if args.name:
- module_name = py3compat.unicode_to_str(args.name)
- else:
- module_name = "_pyd_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
-
- lib_dir = os.path.join(get_ipython_cache_dir(), 'pyd', module_name)
-
- if not os.path.exists(lib_dir):
- os.makedirs(lib_dir)
-
- if os.name == 'nt':
- so_ext = '.dll'
- else:
- so_ext = '.so' #might have to go to dylib on OS X at some point???
- module_path = os.path.join(lib_dir, 'lib' + module_name + so_ext)
-
- have_module = os.path.isfile(module_path)
- need_pydize = not have_module
-
- if need_pydize:
- d_include_dirs = args.include
- pyd_file = os.path.join(lib_dir, module_name + '.d')
- pyd_file = py3compat.cast_bytes_py2(pyd_file, encoding=sys.getfilesystemencoding())
- with io.open(pyd_file, 'w', encoding='utf-8') as f:
- f.write(code)
-
- pyd_dub_file = os.path.join(lib_dir, 'dub.json')
- pyd_dub_file = py3compat.cast_bytes_py2(pyd_dub_file, encoding=sys.getfilesystemencoding())
- pyd_dub_selections_file = os.path.join(lib_dir, 'dub.selections.json')
- pyd_dub_selections_file = py3compat.cast_bytes_py2(pyd_dub_selections_file, encoding=sys.getfilesystemencoding())
-
-
- pyd_dub_json = json.loads('{}')
- pyd_dub_json['name'] = module_name
- pyd_dub_json['dependencies'] = { "pyd": args.pyd_version, "ppyd": ">=0.1.3" }
- pyd_dub_json['subConfigurations'] = { "pyd": "python{0}{1}".format(sys.version_info.major, sys.version_info.minor) }
- pyd_dub_json['sourceFiles'] = [pyd_file]
- pyd_dub_json['targetType'] = 'dynamicLibrary'
- pyd_dub_json['dflags'] = ['-fPIC']
- pyd_dub_json['libs'] = ['phobos2']
- pyd_dub_json['versions'] = ['PydPythonExtension']
-
- with io.open(pyd_dub_file, 'w', encoding='utf-8') as f:
- f.write(unicode(json.dumps(pyd_dub_json)+'\n', encoding='utf-8'))
- try:
- os.remove(pyd_dub_selections_file)
- except:
- pass
-
- dub_desc = json.loads(subprocess.check_output(["dub", "describe", "--root=" + lib_dir], universal_newlines = True))
- for pack in dub_desc['packages']:
- if pack['name'] == 'pyd':
- _infraDir = os.path.join(pack['path'], 'infrastructure')
- break
+ # Construct a single D source code file
+ def construct_d_source_code() -> str:
+ d_code = """
+import pyd.pyd; // Imports everything in pyd except pyd.embedded
+import pyd.embedded;
+import ppyd; // For @pdef
+
+extern(C) void PydMain()
+{
+ import std.meta : Alias;
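+ // Register every declaration in this module annotated with @pdef (ppyd's registerAll) with Python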
+ registerAll!(Alias!(__traits(parent, PydMain)))();
+}
+
+"""
+ d_code += cell
+
+ if not d_code.endswith('\n'):
+ d_code += '\n'
+
+ return d_code
+
+ # Parse --dub_config
+ def parse_dub_config() -> None:
+ if args.dub_config:
+ try:
+ args.dub_config = json.loads(args.dub_config)
+ except Exception as e1:
+ try:
+ args.dub_config = json.loads(ast.literal_eval(args.dub_config))
+ except Exception as e2:
+ print('Failed to parse --dub_config:')
+ print(f' json.loads method error: {e1}')
+ print(f' ast.literal_eval method error: {e2}')
+ raise e2
+
+ # Parse --dub_args
+ def parse_dub_args() -> None:
+ if args.dub_args:
+ try:
+ args.dub_args = ast.literal_eval(args.dub_args)
+ except Exception as e:
+ print(f"Failed to parse --dub_args: {e}")
+ raise e
+
+ # Handle --force
+ def handle_force() -> bool:
+ if args.force:
+ args.dub_args = '--force ' + args.dub_args
+ return True
+ return False
+
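+ # The module name is the user-supplied --name if given, otherwise an md5 hash of the config tuple, so an unchanged cell maps back to its cached build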
+ def get_module_name_from_config_tuple(config_tuple: tuple) -> str:
+ if args.name:
+ return args.name
+ else:
+ return "_pyd_magic_" + hashlib.md5(str(config_tuple).encode('utf-8')).hexdigest()
+
+ # Choose and create the module dir (build folder)
+ def get_and_create_module_dir(module_name: str) -> str:
+ module_dir = os.path.join(get_ipython_cache_dir(), 'pyd', module_name)
+ if not os.path.exists(module_dir):
+ os.makedirs(module_dir)
+ return module_dir
+
+ # Choose the path of the dynamic library to be built
+ def get_module_so_path(module_dir: str) -> str:
if os.name == 'nt':
- boilerplatePath = os.path.join(_infraDir, 'd',
- 'python_dll_windows_boilerplate.d'
- )
+ so_ext = '.dll'
else:
- boilerplatePath = os.path.join(_infraDir, 'd',
- 'python_so_linux_boilerplate.d'
- )
- pyd_dub_json['sourceFiles'].append(boilerplatePath)
-
- if args.compiler == 'dmd':
- so_ctor_path = os.path.join(_infraDir, 'd', 'so_ctor.c')
- so_ctor_object_path = os.path.join(lib_dir, "so_ctor.o")
- subprocess.check_call(['cc', "-c", "-fPIC", "-o" + so_ctor_object_path, so_ctor_path])
- pyd_dub_json['sourceFiles'].append(so_ctor_object_path)
-
- mainTemplate = os.path.join(_infraDir, 'd', 'pydmain_template.d')
- mainTemplate = py3compat.cast_bytes_py2(mainTemplate, encoding=sys.getfilesystemencoding())
- mainTemplateOut = os.path.join(lib_dir, 'pydmain.d')
- mainTemplateOut = py3compat.cast_bytes_py2(mainTemplateOut, encoding=sys.getfilesystemencoding())
- with io.open(mainTemplate, 'r', encoding='utf-8') as t, io.open(mainTemplateOut, 'w', encoding='utf-8') as m:
- m.write(t.read() % {'modulename' : module_name})
- pyd_dub_json['sourceFiles'].append(mainTemplateOut)
-
- pyd_dub_json = ConfigDict(pyd_dub_json)
- pyd_dub_json.merge(args.dub_config)
-
- with io.open(pyd_dub_file, 'w', encoding='utf-8') as f:
- f.write(unicode(json.dumps(pyd_dub_json)+'\n', encoding='utf-8'))
-
- try:
- output = subprocess.check_output(["dub", "build", "--root=" + lib_dir] + args.dub_args.split(' '),
- universal_newlines=True, stderr=subprocess.STDOUT)
- except (subprocess.CalledProcessError) as e:
- print(e.output)
- raise e
- if args.print_compiler_output:
- print(output)
-
- if not have_module:
- self._code_cache[key] = module_name
-
- module = imp.load_dynamic(module_name, module_path)
+ so_ext = '.so' #might have to go to dylib on OS X at some point???
+ return os.path.join(module_dir, 'lib' + module_name + so_ext)
+
+ # Used for choosing the name of the module/build folder by hashing the tuple
+ tuple_unique_to_this_config = ()
+
+ # Rebuild if the execution environment changes significantly
+ tuple_unique_to_this_config += (sys.version_info, sys.executable)
+
+ # Rebuild if the args to %%pyd change
+ tuple_unique_to_this_config += (line,)
+
+ d_code = construct_d_source_code()
+
+ # Rebuild if the source code changes
+ tuple_unique_to_this_config += (d_code,)
+
+ parse_dub_config()
+ parse_dub_args()
+
+ if handle_force():
+ # Force rebuild by appending the current time to the config tuple
+ tuple_unique_to_this_config += (time.time(),)
+
+ module_name = get_module_name_from_config_tuple(tuple_unique_to_this_config)
+ module_dir = get_and_create_module_dir(module_name=module_name)
+ module_path = get_module_so_path(module_dir=module_dir)
+
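+ # A library file already on disk for this configuration means this exact cell was built before and can be reloaded as-is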
+ was_already_built = os.path.isfile(module_path)
+
+ if not was_already_built: # Build module
+ def write_source_code_file() -> None:
+ pyd_file = os.path.join(module_dir, f'{module_name}.d')
+ with open(pyd_file, 'w', encoding='utf-8') as f:
+ f.write(d_code)
+
+ def generate_dub_json_basic() -> dict:
+ pyd_dub_json = {}
+ pyd_dub_json['name'] = module_name
+ pyd_dub_json['dependencies'] = { "pyd": args.pyd_version, "ppyd": ">=0.1.3" }
+ pyd_dub_json['subConfigurations'] = { "pyd": f"python{sys.version_info.major}{sys.version_info.minor}" }
+ pyd_dub_json['sourceFiles'] = [f'{module_name}.d']
+ pyd_dub_json['targetType'] = 'dynamicLibrary'
+ #pyd_dub_json['dflags-dmd'] = ['-fPIC']
+ #pyd_dub_json['dflags-ldc'] = ['--relocation-model=pic']
+ pyd_dub_json['libs'] = []
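+ # The PydPythonExtension version tells pyd it is being built as a Python extension module rather than embedding a Python interpreter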
+ pyd_dub_json['versions'] = ['PydPythonExtension']
+ return pyd_dub_json
+
+ def write_dub_json(json_val: dict) -> None:
+ pyd_dub_file = os.path.join(module_dir, 'dub.json')
+
+ with open(pyd_dub_file, 'w', encoding='utf-8') as f:
+ f.write(json.dumps(json_val) + '\n')
+
+ def remove_sub_selections_json() -> None:
+ pyd_dub_selections_file = os.path.join(module_dir, 'dub.selections.json')
+
+ try:
+ os.remove(pyd_dub_selections_file)
+ except FileNotFoundError:
+ pass # No selections file exists from a previous build
+
+ write_source_code_file()
+ write_dub_json(generate_dub_json_basic())
+ remove_sub_selections_json()
+
+ def get_pyd_infrastructure_path() -> str:
+ dub_desc = json.loads(subprocess.check_output(['dub', 'describe', f'--root={module_dir}'], universal_newlines = True))
+ for package in dub_desc['packages']:
+ if package['name'] == 'pyd':
+ return os.path.join(package['path'], 'infrastructure')
+ raise Exception("Package pyd not found in dub describe output")
+
+ def get_boilerplate_source_file(pyd_infrastructure_path: str) -> str:
+ if os.name == 'nt':
+ boilerplate_file_name = 'python_dll_windows_boilerplate.d'
+ else:
+ boilerplate_file_name = 'python_so_linux_boilerplate.d'
+ return os.path.join(pyd_infrastructure_path, 'd', boilerplate_file_name)
+
+ def generate_pydmain_source_file(pyd_infrastructure_path: str) -> str:
+ template = os.path.join(pyd_infrastructure_path, 'd', 'pydmain_template.d')
+ template_out = os.path.join(module_dir, 'pydmain.d')
+ with open(template, 'r', encoding='utf-8') as infile, open(template_out, 'w', encoding='utf-8') as outfile:
+ outfile.write(infile.read() % {'modulename' : module_name})
+ return template_out
+
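+ # so_ctor.c is a small C shim shipped in pyd's infrastructure directory; compile it with the system C compiler and link the object file into the library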
+ def generate_so_ctor_object_file(pyd_infrastructure_path: str) -> str:
+ so_ctor_source_file_path = os.path.join(pyd_infrastructure_path, 'd', 'so_ctor.c')
+ so_ctor_object_file_path = os.path.join(module_dir, "so_ctor.o")
+ subprocess.check_call(['cc', '-c', '-fPIC', '-o', so_ctor_object_file_path, so_ctor_source_file_path])
+ return so_ctor_object_file_path
+
+ def generate_dub_json_extended() -> ConfigDict:
+ pyd_dub_json = generate_dub_json_basic()
+
+ pyd_infrastructure_path = get_pyd_infrastructure_path()
+
+ boilerplate_source_file_path = get_boilerplate_source_file(pyd_infrastructure_path=pyd_infrastructure_path)
+ pyd_dub_json['sourceFiles'].append(boilerplate_source_file_path)
+
+ so_ctor_object_file_path = generate_so_ctor_object_file(pyd_infrastructure_path=pyd_infrastructure_path)
+ pyd_dub_json['sourceFiles'].append(so_ctor_object_file_path)
+
+ pydmain_source_file_path = generate_pydmain_source_file(pyd_infrastructure_path=pyd_infrastructure_path)
+ pyd_dub_json['sourceFiles'].append(pydmain_source_file_path)
+
+ pyd_dub_json = ConfigDict(pyd_dub_json)
+ pyd_dub_json.merge(args.dub_config)
+
+ return pyd_dub_json
+
+ write_dub_json(generate_dub_json_extended())
+
+ def build_module() -> None:
+ try:
+ output = subprocess.check_output(
+ ['dub', 'build', f'--root={module_dir}', *args.dub_args.split(None)],
+ universal_newlines=True, stderr=subprocess.STDOUT
+ )
+ except (subprocess.CalledProcessError) as e:
+ print('Error encountered while building:')
+ print(e.output)
+ raise e
+ if args.print_compiler_output:
+ print('Build output:')
+ print(output)
+
+ build_module()
+
+ if not was_already_built:
+ self._code_cache[tuple_unique_to_this_config] = module_name
+
+ # Import the module
+ module_spec = importlib.util.spec_from_file_location(module_name, module_path)
+ module = importlib.util.module_from_spec(module_spec)
+ module_spec.loader.exec_module(module) # Run the extension's module init so its symbols are populated
self._import_all(module)
-
+
def load_ipython_extension(ip):
ip.register_magics(PydMagics)