diff --git a/Dockerfile b/Dockerfile
index d6f6ea784..027a3207a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,15 +15,15 @@ WORKDIR /webodm
 RUN printf "deb http://old-releases.ubuntu.com/ubuntu/ hirsute main restricted\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates main restricted\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute universe\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates universe\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute multiverse\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates multiverse\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-backports main restricted universe multiverse" > /etc/apt/sources.list
 
 # Install Node.js using new Node install method
-RUN apt-get -qq update && apt-get -qq install -y --no-install-recommends wget curl && \
-    apt-get install -y ca-certificates gnupg && \
+RUN apt-get -qq update && apt-get -o Acquire::Retries=3 -qq install -y --no-install-recommends wget curl && \
+    apt-get -o Acquire::Retries=3 install -y ca-certificates gnupg && \
     mkdir -p /etc/apt/keyrings && \
     curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \
     NODE_MAJOR=20 && \
     echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list && \
-    apt-get -qq update && apt-get -qq install -y nodejs && \
+    apt-get -o Acquire::Retries=3 -qq update && apt-get -o Acquire::Retries=3 -qq install -y nodejs && \
     # Install Python3, GDAL, PDAL, nginx, letsencrypt, psql
-    apt-get -qq update && apt-get -qq install -y --no-install-recommends python3 python3-pip python3-setuptools python3-wheel git g++ python3-dev python2.7-dev libpq-dev binutils libproj-dev gdal-bin pdal libgdal-dev python3-gdal nginx certbot gettext-base cron postgresql-client-13 gettext tzdata && \
+    apt-get -o Acquire::Retries=3 -qq update && apt-get -o Acquire::Retries=3 -qq install -y --no-install-recommends python3 python3-pip python3-setuptools python3-wheel git g++ python3-dev python2.7-dev libpq-dev binutils libproj-dev gdal-bin pdal libgdal-dev python3-gdal nginx certbot gettext-base cron postgresql-client-13 gettext tzdata && \
     update-alternatives --install /usr/bin/python python /usr/bin/python2.7 1 && update-alternatives --install /usr/bin/python python /usr/bin/python3.9 2 && \
     # Install pip reqs
     pip install -U pip && pip install -r requirements.txt "boto3==1.14.14" && \
diff --git a/app/models/task.py b/app/models/task.py
index ab156dd5c..44c449eaf 100644
--- a/app/models/task.py
+++ b/app/models/task.py
@@ -462,7 +462,7 @@ def write_backup_file(self):
                 'name': self.name,
                 'processing_time': self.processing_time,
                 'options': self.options,
-                'created_at': self.created_at.timestamp(),
+                'created_at': self.created_at.astimezone(timezone.utc).timestamp(),
                 'public': self.public,
                 'resize_to': self.resize_to,
                 'potree_scene': self.potree_scene,
@@ -480,7 +480,7 @@ def read_backup_file(self):
                     self.name = backup.get('name', self.name)
                     self.processing_time = backup.get('processing_time', self.processing_time)
                     self.options = backup.get('options', self.options)
-                    self.created_at = datetime.fromtimestamp(backup.get('created_at', self.created_at.timestamp()))
+                    self.created_at = datetime.fromtimestamp(backup.get('created_at', self.created_at.astimezone(timezone.utc).timestamp()), tz=timezone.utc)
                    self.public = backup.get('public', self.public)
                    self.resize_to = backup.get('resize_to', self.resize_to)
                    self.potree_scene = backup.get('potree_scene', self.potree_scene)
@@ -960,11 +960,12 @@ def extract_assets_and_complete(self):
         self.update_size()
         self.potree_scene = {}
         self.running_progress = 1.0
-        self.console += gettext("Done!") + "\n"
         self.status = status_codes.COMPLETED
 
         if is_backup:
             self.read_backup_file()
+        else:
+            self.console += gettext("Done!") + "\n"
 
         self.save()
 
diff --git a/app/tests/test_api_task_import.py b/app/tests/test_api_task_import.py
index 16b362e6b..04e2bf82e 100644
--- a/app/tests/test_api_task_import.py
+++ b/app/tests/test_api_task_import.py
@@ -217,3 +217,95 @@ def test_task(self):
         self.assertEqual(file_import_task.import_url, "file://all.zip")
         self.assertEqual(file_import_task.images_count, 1)
 
+    def test_backup(self):
+        client = APIClient()
+
+        with start_processing_node():
+            user = User.objects.get(username="testuser")
+            self.assertFalse(user.is_superuser)
+            project = Project.objects.create(
+                owner=user,
+                name="test backup"
+            )
+
+            image1 = open("app/fixtures/tiny_drone_image.jpg", 'rb')
+            image2 = open("app/fixtures/tiny_drone_image_2.jpg", 'rb')
+
+            # Create processing node
+            pnode = ProcessingNode.objects.create(hostname="localhost", port=11223)
+            client.login(username="testuser", password="test1234")
+
+            # Create task
+            res = client.post("/api/projects/{}/tasks/".format(project.id), {
+                'images': [image1, image2]
+            }, format="multipart")
+            image1.close()
+            image2.close()
+            task = Task.objects.get(id=res.data['id'])
+
+            # Wait for completion
+            c = 0
+            while c < 10:
+                worker.tasks.process_pending_tasks()
+                task.refresh_from_db()
+                if task.status == status_codes.COMPLETED:
+                    break
+                c += 1
+                time.sleep(1)
+
+            # Assign some fields
+            task.name = "Backup test"
+            task.potree_scene = {'saved': True}
+            task.public = True
+            task.save()
+
+            self.assertEqual(task.status, status_codes.COMPLETED)
+
+            # Download task backup
+            task_uuid = task.uuid
+            res = client.get("/api/projects/{}/tasks/{}/backup".format(project.id, task.id))
+            self.assertEqual(res.status_code, status.HTTP_200_OK)
+
+            if not os.path.exists(settings.MEDIA_TMP):
+                os.mkdir(settings.MEDIA_TMP)
+
+            assets_path = os.path.join(settings.MEDIA_TMP, "backup.zip")
+
+            with open(assets_path, 'wb') as f:
+                f.write(res.content)
+
+            assets_file = open(assets_path, 'rb')
+
+            # Import with file upload method
+            res = client.post("/api/projects/{}/tasks/import".format(project.id), {
+                'file': [assets_file]
+            }, format="multipart")
+            self.assertEqual(res.status_code, status.HTTP_201_CREATED)
+            assets_file.close()
+
+            file_import_task = Task.objects.get(id=res.data['id'])
+            # Wait for completion
+            c = 0
+            while c < 10:
+                worker.tasks.process_pending_tasks()
+                file_import_task.refresh_from_db()
+                if file_import_task.status == status_codes.COMPLETED:
+                    break
+                c += 1
+                time.sleep(1)
+
+            self.assertEqual(file_import_task.import_url, "file://all.zip")
+            self.assertEqual(file_import_task.images_count, 1)
+            self.assertEqual(file_import_task.processing_node, None)
+            self.assertEqual(file_import_task.auto_processing_node, False)
+            self.assertEqual(file_import_task.name, "Backup test")
+            self.assertTrue('saved' in file_import_task.potree_scene)
+            self.assertEqual(file_import_task.public, True)
+
+            # Can access assets
+            res = client.get("/api/projects/{}/tasks/{}/assets/odm_orthophoto/odm_orthophoto.tif".format(project.id, file_import_task.id))
+            self.assertEqual(res.status_code, status.HTTP_200_OK)
+
+            self.assertTrue(valid_cogeo(file_import_task.assets_path(task.ASSETS_MAP["orthophoto.tif"])))
+            self.assertTrue(valid_cogeo(file_import_task.assets_path(task.ASSETS_MAP["dsm.tif"])))
+            self.assertTrue(valid_cogeo(file_import_task.assets_path(task.ASSETS_MAP["dtm.tif"])))
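
Note on the created_at change in app/models/task.py: writing the timestamp out in UTC and restoring it as a timezone-aware datetime keeps the value stable when the backup is imported on a server with a different local timezone. A minimal sketch of that round-trip, assuming an aware UTC datetime as Django stores when USE_TZ is enabled (the example date is made up for illustration):

    from datetime import datetime, timezone

    # An aware datetime, standing in for Task.created_at
    created_at = datetime(2023, 5, 1, 12, 30, tzinfo=timezone.utc)

    # Backup: normalize to UTC before taking the POSIX timestamp
    ts = created_at.astimezone(timezone.utc).timestamp()

    # Restore: build an aware UTC datetime; a bare datetime.fromtimestamp(ts)
    # would instead return naive local time and could shift the stored value
    restored = datetime.fromtimestamp(ts, tz=timezone.utc)

    assert restored == created_at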