From 80df88f25d095ea7f7fc9cf8c8d0bf9cc7c2bf1f Mon Sep 17 00:00:00 2001
From: Kevin Chung
Date: Thu, 21 May 2020 02:15:16 -0400
Subject: [PATCH] Revert "Use a ThreadPoolExecutor to concurrently store files
 during import (#1427)" (#1428)

This reverts commit 2245df85f4315503462800c30efd9ee683331fe5.
---
 CTFd/utils/exports/__init__.py | 27 +++++++++------------------
 1 file changed, 9 insertions(+), 18 deletions(-)

diff --git a/CTFd/utils/exports/__init__.py b/CTFd/utils/exports/__init__.py
index 9f6bd2cc..00a86f29 100644
--- a/CTFd/utils/exports/__init__.py
+++ b/CTFd/utils/exports/__init__.py
@@ -8,7 +8,6 @@ import zipfile
 import dataset
 import six
 from alembic.util import CommandError
-from concurrent import futures
 from flask import current_app as app
 from flask_migrate import upgrade as migration_upgrade
 from sqlalchemy.exc import OperationalError, ProgrammingError
@@ -312,25 +311,17 @@ def import_ctf(backup, erase=True):
     # Extracting files
     files = [f for f in backup.namelist() if f.startswith("uploads/")]
     uploader = get_uploader()
+    for f in files:
+        filename = f.split(os.sep, 1)
 
-    awaitables = []
-    with futures.ThreadPoolExecutor() as executor:
-        for f in files:
-            filename = f.split(os.sep, 1)
+        if (
+            len(filename) < 2 or os.path.basename(filename[1]) == ""
+        ):  # just an empty uploads directory (e.g. uploads/) or any directory
+            continue
 
-            # just an empty uploads directory (e.g. uploads/) or any directory
-            if len(filename) < 2 or os.path.basename(filename[1]) == "":
-                continue
-
-            # Get the second entry in the list (the actual filename)
-            filename = filename[1]
-            source = backup.open(f)
-
-            # Parallelize the storage requests
-            awaitables.append(executor.submit(uploader.store, source, filename))
-
-        # Await the storage requests
-        futures.wait(awaitables, return_when=futures.FIRST_EXCEPTION)
+        filename = filename[1]  # Get the second entry in the list (the actual filename)
+        source = backup.open(f)
+        uploader.store(fileobj=source, filename=filename)
 
     # Alembic sqlite support is lacking so we should just create_all anyway
     try: