Revert "Use a ThreadPoolExecutor to concurrently store files dut string import (#1427)" (#1428)

This reverts commit 2245df85f4.
Kevin Chung
2020-05-21 02:15:16 -04:00
committed by GitHub
parent 2245df85f4
commit 80df88f25d


@@ -8,7 +8,6 @@ import zipfile
 import dataset
 import six
 from alembic.util import CommandError
-from concurrent import futures
 from flask import current_app as app
 from flask_migrate import upgrade as migration_upgrade
 from sqlalchemy.exc import OperationalError, ProgrammingError
@@ -312,25 +311,17 @@ def import_ctf(backup, erase=True):
     # Extracting files
     files = [f for f in backup.namelist() if f.startswith("uploads/")]
     uploader = get_uploader()
-
-    awaitables = []
-    with futures.ThreadPoolExecutor() as executor:
-        for f in files:
-            filename = f.split(os.sep, 1)
-
-            # just an empty uploads directory (e.g. uploads/) or any directory
-            if len(filename) < 2 or os.path.basename(filename[1]) == "":
-                continue
-
-            # Get the second entry in the list (the actual filename)
-            filename = filename[1]
-            source = backup.open(f)
-            # Parallelize the storage requests
-            awaitables.append(executor.submit(uploader.store, source, filename))
-
-    # Await the storage requests
-    futures.wait(awaitables, return_when=futures.FIRST_EXCEPTION)
+    for f in files:
+        filename = f.split(os.sep, 1)
+
+        if (
+            len(filename) < 2 or os.path.basename(filename[1]) == ""
+        ):  # just an empty uploads directory (e.g. uploads/) or any directory
+            continue
+        filename = filename[1]  # Get the second entry in the list (the actual filename)
+        source = backup.open(f)
+        uploader.store(fileobj=source, filename=filename)
 
     # Alembic sqlite support is lacking so we should just create_all anyway
     try:
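
For reference, the code removed by this revert follows the standard concurrent.futures fan-out pattern: submit each store call to a ThreadPoolExecutor and then wait on the resulting futures. Below is a minimal, self-contained sketch of that pattern under assumed names; it is not CTFd code, and store_file() and FILES are hypothetical stand-ins for uploader.store() and the backup's uploads/ entries. One caveat of this shape is that futures.wait(..., return_when=FIRST_EXCEPTION) only stops waiting once a future fails; it does not re-raise the exception, which stays inside the future until .result() is called.

# Minimal sketch of the ThreadPoolExecutor fan-out pattern removed above.
# store_file() and FILES are hypothetical stand-ins, not CTFd identifiers.
import io
from concurrent import futures

FILES = {
    "uploads/abc/flag.txt": b"flag{example}",
    "uploads/def/notes.md": b"# notes",
}

def store_file(fileobj, filename):
    # Stand-in for uploader.store(fileobj=..., filename=...); just reads the data.
    return filename, len(fileobj.read())

awaitables = []
with futures.ThreadPoolExecutor() as executor:
    for name, data in FILES.items():
        # Submit each store call; the pool runs them concurrently.
        awaitables.append(executor.submit(store_file, io.BytesIO(data), name))

# Stop waiting as soon as any store call raises. wait() only returns the
# done/not-done sets; exceptions surface only when .result() is called on
# each future, so failures can otherwise pass silently.
done, _ = futures.wait(awaitables, return_when=futures.FIRST_EXCEPTION)
for fut in done:
    print(fut.result())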