Verified Commit e34dad80 authored by Huste, Tobias's avatar Huste, Tobias
Browse files

browsing: call url_for only once to improve performance

parent 7f0637f2
Pipeline #2652 passed with stage
in 10 minutes and 13 seconds
......@@ -24,7 +24,7 @@ import os
import socket
import stat
from io import StringIO
from urllib.parse import urlparse
from urllib.parse import urlencode, urlparse
import humanize
import paramiko
......@@ -45,7 +45,7 @@ from .errors import AuthenticationError, FileDoesNotExist, FileTooLargeError, \
SSHKeyNotFoundError, UnsupportedProtocolError
from .models import RemoteServer, SSHKey
from .tasks import download_files, download_via_sftp
from .utils import create_key_from_bucket
from .utils import create_key_from_bucket, limit
_redisstore = LocalProxy(
lambda: current_app.extensions['invenio-uploadbyurl'].redisstore)
......@@ -199,31 +199,29 @@ class SFTPBrowserAPI(ContentNegotiatedMethodView):
)
sftp = client.open_sftp()
dirlist = []
base_url = url_for(
'invenio_uploadbyurl.uploadbyurl_api_sftp_browse',
remote_server=remote.name,
path='/',
_external=True,
).split('?')[0] + '?'
if path != '/':
endpoint = url_for(
'invenio_uploadbyurl.uploadbyurl_api_sftp_browse',
remote_server=remote.name,
path=os.path.split(path)[0],
_external=True,
)
full_path = os.path.split(path)[0]
endpoint = base_url + urlencode({'path': full_path})
dirlist = [dict(
short_path='..',
path=os.path.split(path)[0],
path=full_path,
isdir=True,
endpoint=endpoint,
)]
for element in sftp.listdir_iter(path):
endpoint = url_for(
'invenio_uploadbyurl.uploadbyurl_api_sftp_browse',
remote_server=remote.name,
path=os.path.join(path, element.filename),
_external=True,
)
# get file size
full_path = os.path.join(path, element.filename)
endpoint = base_url + urlencode({'path': full_path})
size = humanize.naturalsize(element.st_size, binary=True)
node = dict(
short_path=element.filename,
path=os.path.join(path, element.filename),
path=full_path,
isdir=stat.S_ISDIR(element.st_mode),
endpoint=endpoint,
size=size,
......
......@@ -45,6 +45,14 @@ def create_key_from_bucket(bucket_id):
str(bucket_id))
def limit(iterator, limit=500):
    """Yield at most ``limit`` items from ``iterator``.

    :param iterator: Any iterator or iterable to draw items from.
    :param limit: Maximum number of items to yield (default: 500).
    :returns: A generator producing up to ``limit`` items.

    The original implementation called ``iterator.__next__()`` directly;
    when the input was exhausted before ``limit`` items, the resulting
    ``StopIteration`` escaped the generator body, which Python 3.7+
    (PEP 479) converts into a ``RuntimeError``.  We now stop cleanly.
    """
    # iter() is a no-op for iterators and generalizes to plain iterables.
    it = iter(iterator)
    for _ in range(limit):
        try:
            yield next(it)
        except StopIteration:
            # Input exhausted early: end the generator normally instead of
            # letting StopIteration leak out (PEP 479 -> RuntimeError).
            return
def delete_all_keys():
"""Delete all keys from kv-store."""
for key in _redisstore.iter_keys():
......
......@@ -150,7 +150,7 @@ setup(
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment