diff --git a/Dockerfile b/Dockerfile index 3f9c5af12..19c02b3b9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -41,7 +41,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ < /etc/apt/sources.list.d/deadsnakes.list @@ -58,6 +58,11 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ libgdal-dev nginx certbot gettext-base cron postgresql-client gettext tzdata # Create virtualenv python$PYTHON_VERSION -m venv $WORKDIR/venv + # Build entwine + mkdir /staging && cd /staging + git clone -b 290 https://github.com/OpenDroneMap/entwine && cd entwine + mkdir build && cd build && cmake .. -DWITH_TESTS=OFF -DWITH_ZSTD=OFF -DCMAKE_INSTALL_PREFIX=/staging/entwine/build/install && make -j6 && make install + cd /webodm EOT # Modify PATH to prioritize venv, effectively activating venv @@ -147,6 +152,8 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ rm -rf /tmp/* /var/tmp/* EOT +COPY --from=build /staging/entwine/build/install/bin/entwine /usr/bin/entwine +COPY --from=build /staging/entwine/build/install/lib/libentwine* /usr/lib/ COPY --from=build $WORKDIR ./ VOLUME /webodm/app/media diff --git a/app/admin.py b/app/admin.py index 565c457cc..5b2d074f3 100644 --- a/app/admin.py +++ b/app/admin.py @@ -17,6 +17,7 @@ from app.models import Preset from app.models import Plugin from app.models import Profile +from app.models import Redirect from app.plugins import get_plugin_by_name, enable_plugin, disable_plugin, delete_plugin, valid_plugin, \ get_plugins_persistent_path, clear_plugins_cache, init_plugins from .models import Project, Task, Setting, Theme @@ -96,6 +97,9 @@ class ThemeAdmin(admin.ModelAdmin): admin.site.register(Theme, ThemeAdmin) admin.site.register(PluginDatum, admin.ModelAdmin) +if settings.CLUSTER_ID is not None: + admin.site.register(Redirect, admin.ModelAdmin) + class PluginAdmin(admin.ModelAdmin): list_display = ("name", "description", "version", "author", "enabled", "plugin_actions") diff --git 
a/app/management/commands/cluster.py b/app/management/commands/cluster.py index a683bfdac..f8c87ff19 100644 --- a/app/management/commands/cluster.py +++ b/app/management/commands/cluster.py @@ -1,42 +1,425 @@ import os import json import math +import shutil from django.core.management.base import BaseCommand from django.core.management import call_command -from app.models import Project +from app.models import Project, Task, Preset, PluginDatum, Redirect from webodm import settings from django.db import connection +from django.contrib.auth.models import User +from django.core import serializers +from guardian.shortcuts import get_users_with_perms +from django.db import transaction + +class DryRunException(Exception): + pass + +def die(msg): + print(msg) + exit(1) + +def serialize(obj): + data = json.loads(serializers.serialize("json", [obj]))[0] + f = data['fields'] + + model = data['model'] + if model == "app.project": + f['owner'] = "_" + data['permissions'] = [] + perms = get_users_with_perms(obj, attach_perms=True, with_group_users=False) + for user in perms: + if user.id == obj.owner.id: + data['permissions'] = perms[user] + assert len(data['permissions']) == 4, "Default permissions, other combinations should never happen" + else: + print(f"Warning! 
Permissions for [{obj.name}] ({obj.id}) are related to \"{user.username}\" which is not going to be exported.") + elif model == "app.task": + f['processing_node'] = None + elif model == "app.profile": + data['pk'] = None + f['user'] = '_' + elif model == "auth.user": + data['pk'] = None + elif model == "app.preset": + data['pk'] = None + f['owner'] = '_' + elif model == "app.plugindatum": + data['pk'] = None + f['user'] = '_' + else: + raise Exception("Unknown model: %s" % model) + + return data + +def deserialize(data, username=None, user=None): + model = data['model'] + f = data['fields'] + + if model != 'auth.user': + if user is None: + raise Exception("user expected") + + if model == 'auth.user': + if username is None: + raise Exception("username expected") + if f['username'] != username: + print("Importing exported user %s as %s" % (f['username'], username)) + f['username'] = username + elif model == 'app.profile': + data['pk'] = user.profile.id + f['user'] = int(user.id) + elif model == 'app.preset': + f['owner'] = int(user.id) + elif model == 'app.plugindatum': + f['user'] = int(user.id) + elif model == 'app.project': + f['owner'] = int(user.id) + elif model == 'app.task': + pass + else: + raise Exception("Unknown model: %s" % model) + + obj = next(serializers.deserialize("json", json.dumps([data]))) + + return obj + + +def importexport_user(action, username, dry_run=False, cluster_export_dir=None, merge=False): + if action != "import" and action != "export": + die("Invalid action") + + if dry_run: + print("!!! 
Dry run !!!") + + def make_dir(d): + if not os.path.isdir(d): + print("Creating %s" % d) + if not dry_run: + os.makedirs(d) + else: + print("Dir exists: %s" % d) + + def remove_dir(d): + if os.path.isdir(d): + print("Removing %s" % d) + if not dry_run: + shutil.rmtree(d) + + def list_safe(d): + if os.path.isdir(d): + return os.listdir(d) + else: + return [] + + def copy_dir(src, dst): + if os.path.isdir(src): + print("Copying %s --> %s" % (src, dst)) + if not dry_run: + shutil.copytree(src, dst) + else: + print("Skipping %s (does not exist)" % src) + + def move_dir(src, dst, check_dst=True): + if os.path.isdir(src): + print("Moving %s --> %s" % (src, dst)) + if not dry_run: + shutil.move(src, dst) + else: + print("Skipping %s (does not exist)" % src) + + + if cluster_export_dir is None: + cluster_export_dir = os.path.join(settings.MEDIA_ROOT, "cluster_migrations") + + media_project_dir = os.path.join(settings.MEDIA_ROOT, "project") + print("Cluster export directory: %s" % cluster_export_dir) + print("Media project directory: %s" % media_project_dir) + + if action == "export": + print("Exporting") + try: + user = User.objects.get(username=username) + except User.DoesNotExist: + die("User does not exist") + + print("User: %s" % user.username) + user_export_dir = os.path.join(cluster_export_dir, str(user.username)) + projects_export_dir = os.path.join(user_export_dir, "projects") + + make_dir(cluster_export_dir) + make_dir(user_export_dir) + make_dir(projects_export_dir) + + print("User export directory: %s" % user_export_dir) + print("Projects export directory: %s" % projects_export_dir) + + # Get list of projects for this user + user_projects = Project.objects.filter(owner=user).order_by('created_at') + user_tasks = Task.objects.filter(project__owner=user).order_by('created_at') + user_presets = Preset.objects.filter(owner=user, system=False).order_by('created_at') + user_plugindatum = PluginDatum.objects.filter(user=user).order_by('id') + + print("Total projects: 
%s" % len(user_projects)) + print([p.id for p in user_projects]) + + print("Total tasks: %s" % len(user_tasks)) + print("Total presets: %s" % len(user_presets)) + print("Total plugin data: %s" % len(user_plugindatum)) + + + if len(list_safe(projects_export_dir)) > 0: + print("Export directory not empty, removing/recreating") + remove_dir(projects_export_dir) + make_dir(projects_export_dir) + + db = { + 'version': settings.VERSION, + 'projects': [serialize(p) for p in user_projects], + 'tasks': [serialize(t) for t in user_tasks], + 'profile': serialize(user.profile), + 'user': serialize(user), + 'presets': [serialize(p) for p in user_presets], + 'plugin_datum': [serialize(pd) for pd in user_plugindatum] + } + + db_dump_file = os.path.join(user_export_dir, "db.json") + db_dump = json.dumps(db) + + print("Writing %s" % db_dump_file) + if not dry_run: + with open(db_dump_file, "w", encoding="utf-8") as f: + f.write(db_dump) + + # Copy all project folders (note some do not exist) + for p in user_projects: + copy_dir(p.get_project_dir(), os.path.join(projects_export_dir, str(p.id))) + + elif action == "import": + print("Importing") + username = username.replace("..", "").replace("/", "") + try: + user = User.objects.get(username=username) + except User.DoesNotExist: + user = None + + print("User: %s%s" % (username, " [EXISTS]" if user is not None else " [NEW]")) + if user is not None and not merge: + die("Pass --merge to attempt to merge imported results with an existing user") + + user_import_dir = os.path.join(cluster_export_dir, username) + projects_import_dir = os.path.join(user_import_dir, "projects") + + for d in [cluster_export_dir, user_import_dir]: + if not os.path.isdir(d): + die("%s does not exist" % d) + + db_dump_file = os.path.join(user_import_dir, "db.json") + with open(db_dump_file, "r", encoding="utf-8") as f: + db = json.loads(f.read()) + + print("Version: %s" % db['version']) + print("Projects: %s" % len(db['projects'])) + print("Tasks: %s" % 
len(db['tasks'])) + print("Presets: %s" % len(db['presets'])) + print("Plugin Datum: %s" % len(db['plugin_datum'])) + for k in ['profile', 'user']: + if k in db: + print("%s: yes" % k.capitalize()) + else: + die("Missing key '%s'" % k) + + if db['version'] != settings.VERSION: + die("Version mismatch: %s vs %s" % (db['version'], settings.VERSION)) + + # Validate project directories + project_ids = list_safe(projects_import_dir) + print("Project folders: %s" % len(project_ids)) + + for pid in project_ids: + if os.path.isdir(os.path.join(media_project_dir, pid)): + print("Cannot import project %s because it conflicts with an existing project directory in %s" % (pid, media_project_dir)) + if not dry_run: + exit(1) + + # User + + imp_user = deserialize(db['user'], username=username) + + if user is not None: + assert user.pk == imp_user.object.pk + assert user.username == imp_user.object.username + else: + assert User.objects.filter(pk=imp_user.object.pk).count() == 0 + + try: + with transaction.atomic(): + print("Importing user") + imp_user.save() + if user is None: + user = User.objects.get(pk=imp_user.object.pk) + + print("Importing profile") + profile = deserialize(db['profile'], user=user) + profile.save() + + existing_presets = Preset.objects.filter(owner=user, system=False) + if existing_presets.count() > 0: + print("Deleting %s existing presets" % existing_presets.count()) + existing_presets.delete() + + print("Importing presets") + for preset in db['presets']: + p = deserialize(preset, user=user) + p.save() + + + existing_pd = PluginDatum.objects.filter(user=user) + if existing_pd.count() > 0: + print("Deleting %s existing plugin datum" % existing_pd.count()) + existing_pd.delete() + + print("Importing plugin datum") + for pd in db['plugin_datum']: + pd = deserialize(pd, user=user) + pd.save() + + print("Importing projects") + for project in db['projects']: + try: + existing_project = Project.objects.get(pk=project['pk']) + if existing_project.owner.username == 
user.username: + print("Overriding existing project") + else: + print("Cannot import project %s because a project with the same ID already exists and is owned by a different user (%s)" % (project['pk'], existing_project.owner.username)) + raise Exception("Project import failed") + except Project.DoesNotExist: + pass + + permissions = project['permissions'] + del project['permissions'] + + p = deserialize(project, user=user) + p.save() + p = Project.objects.get(pk=p.object.pk) + + # Quick check, the owner should have default permissions + # to the project + for perm in permissions: + assert user.has_perm(perm, p) + + print("[%s] (%s)" % (str(p), p.id)) + + print("Importing tasks") + for task in db['tasks']: + t = deserialize(task, user=user) + t.save() + + print("%s (%s)" % (str(t.object), t.object.project.id)) + + if dry_run: + raise DryRunException() + except DryRunException: + print("Dry run, rolling back") + + # Move projects from import folder + for pid in project_ids: + src = os.path.join(projects_import_dir, pid) + dst = os.path.join(media_project_dir, pid) + move_dir(src, dst) + + # Cleanup + remove_dir(user_import_dir) + +def redirect(username, to_cluster, dry_run=False, delete_projects_tasks=False): + if settings.CLUSTER_ID == to_cluster: + die("Cannot redirect to itself (this server's cluster ID is %s)" % to_cluster) + + try: + user = User.objects.get(username=username) + except User.DoesNotExist: + die("User does not exist") + + try: + with transaction.atomic(): + user.profile.cluster_id = to_cluster + user.profile.save() + + print ("Saved user profile cluster ID (%s)" % to_cluster) + + if dry_run: + raise DryRunException() + except DryRunException: + print("Dry run, rolling back") + + if delete_projects_tasks: + print("Setting up redirects for: %s" % user.username) + user_projects = Project.objects.filter(owner=user).order_by('created_at') + user_tasks = Task.objects.filter(project__owner=user).order_by('created_at') + + print("Target cluster: %s" % 
to_cluster) + print("Projects: %s" % len(user_projects)) + print("Tasks: %s" % len(user_tasks)) + + try: + count = 0 + with transaction.atomic(): + for p in user_projects: + Redirect.objects.create(project_id=p.id, project_public_id=p.public_id, cluster_id=to_cluster) + count += 1 + for p in user_tasks: + Redirect.objects.create(task_id=p.id, cluster_id=to_cluster) + count += 1 + + print("Setup %s redirects" % count) + + for p in user_projects: + if not dry_run: + try: + p.delete() + except Exception as e: + print("Cannot delete project %s: %s" % (p.id, str(e))) + else: + print("Deleting project %s (dry run)" % (p.id)) + + if dry_run: + raise DryRunException() + except DryRunException: + print("Dry run, rolling back") + class Command(BaseCommand): requires_system_checks = [] def add_arguments(self, parser): - parser.add_argument("action", type=str, choices=['stagger', 'getref']) + parser.add_argument("action", type=str, choices=['stagger', 'getref', 'export', 'import', 'redirect']) parser.add_argument("--refs", required=False, help="JSON array of reference dictionaries") parser.add_argument("--id-buffer", required=False, default=1000, help="ID increment buffer when assigning next seq IDs") parser.add_argument("--dry-run", required=False, action="store_true", help="Don't actually modify tables, just test") - - + parser.add_argument("--user", required=False, default=None, help="User ID to migrate") + parser.add_argument("--cluster-export-dir", required=False, default=None, help="Override default export cluster dir") + parser.add_argument("--merge", required=False, action="store_true", help="Try to merge imported results for a user if the user already exists") + parser.add_argument("--delete", required=False, action="store_true", help="Permanently delete user projects and tasks after setting up redirect") + parser.add_argument("--to-cluster", required=False, default=-1, help="Cluster ID to redirect to") + super(Command, self).add_arguments(parser) def 
handle(self, **options): - if settings.CLUSTER_ID is None: - print("CLUSTER_ID is not set") - exit(1) - dry_run = options.get('dry_run', False) + action = options.get('action') + + if action == 'stagger': + if settings.CLUSTER_ID is None: + die("CLUSTER_ID is not set") - if options.get('action') == 'stagger': refs = json.loads(options.get('refs')) id_buffer = int(options.get('id_buffer')) if not isinstance(refs, list): - print("Invalid refs, must be an array") - exit(1) + die("Invalid refs, must be an array") if len(refs) <= 1: - print("Invalid refs, must have 2 or more items") - exit(1) + die("Invalid refs, must have 2 or more items") max_project_id = max([r['next_project_id'] for r in refs]) start_project_id = max_project_id + id_buffer @@ -59,7 +442,10 @@ def handle(self, **options): print("Dry run, not executing") - elif options.get('action') == 'getref': + elif action == 'getref': + if settings.CLUSTER_ID is None: + die("CLUSTER_ID is not set") + with connection.cursor() as c: c.execute("SELECT last_value FROM app_project_id_seq") next_project_id = c.fetchone()[0] @@ -72,7 +458,23 @@ def handle(self, **options): print(json.dumps(ref)) - + elif action == 'export' or action == 'import': + user = options.get('user') + if user is None: + die("--user is required") + importexport_user(action, user, dry_run=dry_run, cluster_export_dir=options.get('cluster_export_dir'), merge=options.get('merge')) + + elif action == 'redirect': + user = options.get('user') + if user is None: + die("--user is required") + to_cluster = options.get('to_cluster') + if to_cluster is None or to_cluster == -1: + die("--to-cluster is required") + to_cluster = int(to_cluster) + redirect(user, to_cluster, dry_run=dry_run, delete_projects_tasks=options.get('delete', False)) + else: + print("Invalid action %s" % options.get('action')) diff --git a/app/management/commands/ept.py b/app/management/commands/ept.py new file mode 100644 index 000000000..c22dc691d --- /dev/null +++ 
b/app/management/commands/ept.py @@ -0,0 +1,33 @@ +import os +from django.core.management.base import BaseCommand +from app.models import Task + +class Command(BaseCommand): + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument("--all", action="store_true", required=False, default=False, help="Confirm that you want to check all tasks") + parser.add_argument("--user", required=False, default=None, help="Check tasks belonging to this username") + parser.add_argument("--threads", type=int, required=False, default=1, help="Number of threads to use for generating EPT data") + + super(Command, self).add_arguments(parser) + + def handle(self, **options): + if options.get('user'): + tasks = Task.objects.filter(project__owner__username=options.get('user')) + elif options.get('all'): + tasks = Task.objects.all() + else: + print("Specify either --user or --all") + exit(1) + + print("Checking %s tasks" % tasks.count()) + + count = 0 + for t in tasks: + if t.check_ept(threads=options.get('threads')): + print(str(t)) + count += 1 + + print("Built %s EPT" % count) + \ No newline at end of file diff --git a/app/migrations/0045_redirect.py b/app/migrations/0045_redirect.py new file mode 100644 index 000000000..a695a4357 --- /dev/null +++ b/app/migrations/0045_redirect.py @@ -0,0 +1,27 @@ +# Generated by Django 2.2.27 on 2025-08-16 20:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('app', '0044_task_console_link'), + ] + + operations = [ + migrations.CreateModel( + name='Redirect', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('project_id', models.IntegerField(blank=True, db_index=True, default=None, help_text='Project Id', null=True, unique=True, verbose_name='Project Id')), + ('project_public_id', models.UUIDField(blank=True, db_index=True, default=None, help_text='Public identifier of the project', null=True, 
unique=True, verbose_name='Public Id')), + ('task_id', models.UUIDField(blank=True, db_index=True, default=None, help_text='Task Id', null=True, unique=True, verbose_name='Task Id')), + ('cluster_id', models.IntegerField(help_text='Cluster Id to redirect to', verbose_name='Cluster Id')), + ], + options={ + 'verbose_name': 'Redirect', + 'verbose_name_plural': 'Redirects', + }, + ), + ] diff --git a/app/migrations/0046_profile_cluster_id.py b/app/migrations/0046_profile_cluster_id.py new file mode 100644 index 000000000..6f8834d7e --- /dev/null +++ b/app/migrations/0046_profile_cluster_id.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.27 on 2025-08-20 14:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('app', '0045_redirect'), + ] + + operations = [ + migrations.AddField( + model_name='profile', + name='cluster_id', + field=models.IntegerField(blank=True, help_text='Cluster Id this user is assigned to', null=True, verbose_name='Cluster Id'), + ), + ] diff --git a/app/models/__init__.py b/app/models/__init__.py index a9d64a244..a7fa9d67d 100644 --- a/app/models/__init__.py +++ b/app/models/__init__.py @@ -6,6 +6,7 @@ from .plugin_datum import PluginDatum from .plugin import Plugin from .profile import Profile +from .redirect import Redirect # deprecated def image_directory_path(image_upload, filename): diff --git a/app/models/profile.py b/app/models/profile.py index 54e227d1d..51f74d30a 100644 --- a/app/models/profile.py +++ b/app/models/profile.py @@ -13,6 +13,7 @@ class Profile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE) quota = models.FloatField(default=-1, blank=True, help_text=_("Maximum disk quota in megabytes"), verbose_name=_("Quota")) + cluster_id = models.IntegerField(blank=True, null=True, help_text=_("Cluster Id this user is assigned to"), verbose_name=_("Cluster Id")) def has_quota(self): return self.quota != -1 diff --git a/app/models/redirect.py 
b/app/models/redirect.py new file mode 100644 index 000000000..3cbe1c613 --- /dev/null +++ b/app/models/redirect.py @@ -0,0 +1,25 @@ +from django.conf import settings +from django.db import models +from django.utils.translation import gettext_lazy as _ + +class Redirect(models.Model): + project_id = models.IntegerField(db_index=True, default=None, unique=True, blank=True, null=True, help_text=_("Project Id"), verbose_name=_("Project Id")) + project_public_id = models.UUIDField(db_index=True, default=None, unique=True, blank=True, null=True, help_text=_("Public identifier of the project"), verbose_name=_("Public Id")) + task_id = models.UUIDField(db_index=True, default=None, unique=True, blank=True, null=True, help_text=_("Task Id"), verbose_name=_("Task Id")) + + cluster_id = models.IntegerField(blank=False, null=False, help_text=_("Cluster Id to redirect to"), verbose_name=_("Cluster Id")) + + def __str__(self): + parts = [] + if self.project_id is not None: + parts.append("P:%s" % self.project_id) + if self.project_public_id is not None: + parts.append("PP:%s" % self.project_public_id) + if self.task_id is not None: + parts.append("T:%s" % self.task_id) + + return "|".join(parts) + " --> %s" % self.cluster_id + + class Meta: + verbose_name = _("Redirect") + verbose_name_plural = _("Redirects") diff --git a/app/models/task.py b/app/models/task.py index ac21acd2f..a6e9519d6 100644 --- a/app/models/task.py +++ b/app/models/task.py @@ -3,6 +3,7 @@ import shutil import time import struct +import tempfile from datetime import datetime import uuid as uuid_module from zipstream.ng import ZipStream @@ -1027,6 +1028,8 @@ def extract_assets_and_complete(self): logger.info("Populated extent field with {} for {}".format(raster_path, self)) + self.check_ept() + # Flushes the changes to the *_extent fields # and immediately reads them back into Python # This is required because GEOS screws up the X/Y conversion @@ -1061,6 +1064,43 @@ def extract_assets_and_complete(self): from 
app.plugins import signals as plugin_signals plugin_signals.task_completed.send_robust(sender=self.__class__, task_id=self.id) + def check_ept(self, threads=1): + # Make sure that the entwine_pointcloud/ept.json file exists + # and generate it otherwise + ept_file = self.assets_path("entwine_pointcloud", "ept.json") + if os.path.isfile(ept_file): + return + + point_cloud = self.get_point_cloud() + if point_cloud is None: + return + + # We have the point cloud, but no EPT. Generate EPT. + entwine = shutil.which('entwine') + if not entwine: + logger.warning("Cannot create EPT, entwine program is missing") + return None + + ept_dir = self.assets_path("entwine_pointcloud") + try: + if not os.path.exists(settings.MEDIA_TMP): + os.makedirs(settings.MEDIA_TMP) + + tmp_ept_path = tempfile.mkdtemp('_ept', dir=settings.MEDIA_TMP) + params = [entwine, "build", "--threads", str(threads), + "--tmp", tmp_ept_path, + "-i", point_cloud, + "-o", ept_dir] + + subprocess.run(params, timeout=12*60*60) + + if os.path.isdir(tmp_ept_path): + shutil.rmtree(tmp_ept_path) + return True + except Exception as e: + logger.warning("Cannot create EPT for %s (%s). 3D point cloud will not display properly." 
% (point_cloud, str(e))) + + def get_extent_fields(self): return [ (os.path.realpath(self.assets_path("odm_orthophoto", "odm_orthophoto.tif")), @@ -1076,6 +1116,11 @@ def get_reference_raster(self): for file, field in extent_fields: if getattr(self, field) is not None: return file + + def get_point_cloud(self): + f = os.path.realpath(self.assets_path(self.ASSETS_MAP["georeferenced_model.laz"])) + if os.path.isfile(f): + return f def get_tile_path(self, tile_type, z, x, y): return self.assets_path("{}_tiles".format(tile_type), z, x, "{}.png".format(y)) diff --git a/app/static/app/js/components/ExportAssetDialog.jsx b/app/static/app/js/components/ExportAssetDialog.jsx index 6288cfff4..d20c7b095 100644 --- a/app/static/app/js/components/ExportAssetDialog.jsx +++ b/app/static/app/js/components/ExportAssetDialog.jsx @@ -23,7 +23,9 @@ class ExportAssetDialog extends React.Component { } handleSave = (cb) => { - this.exportAssetPanel.handleExport()(cb); + this.exportAssetPanel.handleExport()(cb, (progress) => { + if (this.exportAssetFormDialog) this.exportAssetFormDialog.updateSaveProgress(progress); + }); } render(){ @@ -38,6 +40,7 @@ class ExportAssetDialog extends React.Component { savingLabel={_("Downloading…")} saveLabel={_("Download")} saveAction={() => {}} + ref={(domNode) => { this.exportAssetFormDialog = domNode; }} handleSaveFunction={this.handleSave} onHide={this.props.onHide}> { if (!format) format = this.state.format; - return (cb) => { + return (cb, onProgress) => { if (typeof cb !== 'function') cb = undefined; + if (typeof onProgress !== 'function') onProgress = undefined; const { task } = this.props; this.setState({exporting: true, error: "", progress: null}); @@ -155,6 +156,7 @@ export default class ExportAssetPanel extends React.Component { } }, (_, progress) => { this.setState({progress}); + if (onProgress !== undefined) onProgress(progress); }); }else if (result.url){ // Simple download diff --git a/app/static/app/js/components/FormDialog.jsx 
b/app/static/app/js/components/FormDialog.jsx index 608d7608b..c42bd246a 100644 --- a/app/static/app/js/components/FormDialog.jsx +++ b/app/static/app/js/components/FormDialog.jsx @@ -41,6 +41,7 @@ class FormDialog extends React.Component { showModal: props.show, saving: false, deleting: false, + saveProgress: null, error: "" }; @@ -90,7 +91,7 @@ class FormDialog extends React.Component { show(){ if (this.props.reset) this.props.reset(); - this.setState({showModal: true, saving: false, error: ""}); + this.setState({showModal: true, saving: false, error: "", saveProgress: null}); } hide(){ @@ -102,6 +103,11 @@ class FormDialog extends React.Component { } } + updateSaveProgress = progress => { + if (progress === undefined) progress = null; + this.setState({saveProgress: progress}); + } + handleEnter = e => { if (e.key === 'Enter' || e.keyCode === 13){ this.handleSave(e); @@ -111,7 +117,7 @@ class FormDialog extends React.Component { handleSave(e){ e.preventDefault(); - this.setState({saving: true, error: ""}); + this.setState({saving: true, error: "", saveProgress: null}); if (this.props.handleSaveFunction){ this.props.handleSaveFunction(err => { @@ -200,7 +206,7 @@ class FormDialog extends React.Component {