Using aioftp with asyncio to download files to a local repository

Question:

Recently I have been studying Python’s "aioftp" package in order to download some data asynchronously from a remote server.
So far, I have had no success. Generally, the error message I get concerns access to the local file on my computer, as if more than one thread were trying to write to it at the same time. Other times, I get a ConnectionResetError: "Connection lost".

Here is some example code for reference. It should list all available files within a remote repository (‘ftp.star.nesdis.noaa.gov’) and asynchronously download the selected files to one’s local computer:

import os
import asyncio
import aiofiles
from pathlib import Path
from queue import Queue, Empty as QueueEmptyException
from threading import Semaphore
import aioftp
from ftplib import FTP


def get_parents(level=2):
    
    cwd = os.getcwd()
    
    for i in range(level):
        cwd = os.path.dirname(cwd)
    return cwd



def getUrlsFromFTP(serverFTP="",
                   remoteDirName=""
                   ):
    urlsToDownload = []
    ftp = FTP(serverFTP)
    ftp.login()

    ftp.cwd(remoteDirName)
    try:
        files = list(sorted([file for file in ftp.nlst()]))

        urlsToDownload = [os.path.join(remoteDirName,
                                       str(f)
                                       ).replace("\\", "/")
                                for f in files]


        print("N° of files found: ", len(urlsToDownload))

    except Exception as err:
        print(err)
    
    finally:
        ftp.close()

    return urlsToDownload


class FTPFileDownloader:
    def __init__(self,
                 serverFTP: str,
                 remoteURLFolder="",
                 user="",
                 password="",
                 dirname=r"dataFolder",
                 urlsToDownload=list(),
                 maxConcurrentThreads=20):
        self.serverFTP = serverFTP
        self.user = user
        self.password = password
        self.remoteURLFolder = remoteURLFolder
        self.chunk_size = 1024  # 1 KB
        self.dirnameToSave = dirname
        print("Saving in: ", self.dirnameToSave)
        self.downloadSemaphore = Semaphore(maxConcurrentThreads)
        self.downloadQueue = Queue()
        for url in urlsToDownload:
            localFileName = Path(url).name
            self.downloadQueue.put( (localFileName, url) )


    async def download(self, client):
        self.downloadSemaphore.acquire()
        while not self.downloadQueue.empty():
            try:
                localfilename, url = self.downloadQueue.get()
                
                print("Downloading: ", localfilename)
                async with aiofiles.open(localfilename, "wb") as file:
                    async with client.download_stream(url) as stream:
                        async for block in stream.iter_by_block(self.chunk_size):
                            content = await block.read()
                            await file.write(content)

            except QueueEmptyException:
                print("Queue is Empty")

        self.downloadSemaphore.release()

    async def downloadURLFiles(self):
        async with aioftp.Client.context(self.serverFTP,
                                         # user=self.user,
                                         # password=self.password
                                         ) as client:

            tasks = []
            for i in range(self.downloadQueue.qsize()):
                task = asyncio.create_task(self.download(client))
                tasks.append(task)
            
            for t in tasks:
                await t.result()

    def run(self):
        asyncio.run(self.downloadURLFiles())


def main():
    serverFTP = 'ftp.star.nesdis.noaa.gov'
    
    GOES_URLS = getUrlsFromFTP(
        serverFTP,
        remoteDirName = r"/pub/sod/mecb/crw/data/5km/v3.1/nc/v1.0/daily/sst/",
        
    )
    dirname = os.path.join(get_parents(1),
                           "Downloads")
   
    Downloader = FTPFileDownloader(serverFTP,
                                   urlsToDownload=GOES_URLS,
                                   dirname=dirname)
    
    Downloader.run()
    

if __name__ == "__main__":
    import nest_asyncio
    nest_asyncio.apply()
    main()
    

So far, all I got was "RuntimeError: This event loop is already running".

Any insight is welcome.

Sincerely,

Answers:

The code has many issues other than the asynchronous download…

I am not sure what np.concatenate is, but I assume it is numpy.
So, import numpy as np

You have a mutable default argument in getUrlsFromFTP.

Unless this is intended, such a construction is not recommended in a function definition;
see this page for a brief explanation.

def getUrlsFromFTP(serverFTP,
                   varname="sst",
                   yearrange=None
                   ):
    if yearrange is None:
        yearrange = {"lower": 2010,
                     "higher": 2015}

get_parents, pathExists, DownloadProgressBar are not defined.

if "__main__" == __name__: should read if __name__ == "__main__":

I made a few changes to your code that should get you moving.

import os
import asyncio
from pathlib import Path

import aioftp
from ftplib import FTP

import numpy as np


def getUrlsFromFTP(serverFTP,
                   varname="sst",
                   yearrange=None
                   ):
    if yearrange is None:
        yearrange = {"lower": 2010,
                     "higher": 2015}
    GOES_URLS = []
    ftp = FTP(serverFTP)
    ftp.login()

    baseftp = ("/pub/sod/mecb/crw/data/5km/v3.1/nc/v1.0/daily/{0}/".format(
        varname)
    )

    ftp.cwd(baseftp)
    try:
        years = list(sorted([int(y) for y in ftp.nlst()]))

        years = [y for y in years if all([y >= yearrange["lower"],
                                          y < yearrange["higher"],
                                          ])
                 ]
        print("Years to be evaluated: ", years)

        def getfilesFromFTPFolder(url):
            print("url: ", url)
            ftp.cwd(url)
            ncfiles = ftp.nlst()

            ncfilesURLS = [os.path.join(url, f).replace("\\", "/")
                           for f in ncfiles]
            return ncfilesURLS

        sst_GOES_Annual_URLS = [os.path.join(baseftp,
                                             str(x)
                                             ) for x in years]
        GOES_SST_FileNames = list(
            map(lambda url: getfilesFromFTPFolder(url),
                sst_GOES_Annual_URLS)
        )

        GOES_URLS = np.concatenate(GOES_SST_FileNames).tolist()

        print("N° of images found: ", len(GOES_URLS))

    except Exception as err:
        print(err)

    ftp.close()

    return GOES_URLS


class FTPFileDownloader:
    def __init__(self,
                 serverFTP: str,
                 remoteURLFolder="",
                 user="",
                 password="",
                 dirname=r"dataFolder"):
        self.serverFTP = serverFTP
        self.user = user
        self.password = password
        self.remoteURLFolder = remoteURLFolder
        self.chunk_size = 1024  # 1 KB
        self.dirnameToSave = os.path.join(get_parents(3),
                                          dirname)
        print("Saving in: ", self.dirnameToSave)
        # pathExists(self.dirnameToSave)

    # def progressBar(self, url, totalSize):
    #     return DownloadProgressBar(unit='iB',
    #                                unit_scale=True,
    #                                total=totalSize,
    #                                miniters=1,
    #                                unit_divisor=self.chunk_size,
    #                                desc=url.split('/')[-1])

    async def download(self, url_path, client, localfilename):
        print("Downloading: ", localfilename)

        with open(localfilename, "wb") as file:
            async with client.download_stream(url_path) as stream:
                async for block in stream.iter_by_block(self.chunk_size):
                    file.write(block)

        # with open(localfilename, "wb") as file:
        #     async with client.download_stream(url) as stream:
        #         async for block in stream.iter_by_block(self.chunk_size):
        #             while True:
        #                 data = block.read()
        #                 if not data:
        #                     break
        #                 file.write(data)

    async def downloadURLFiles(self, url, localfilename):
        async with aioftp.Client.context(self.serverFTP,
                                         # user=self.user,
                                         # password=self.password
                                         ) as client:
            local = await self.download(url, client, localfilename)
        return local
        # try:
        #
        #     tasks = []
        #     for url in urls:
        #         localfilename = os.path.join(self.dirnameToSave,
        #                                      os.path.basename(url))
        #
        #         task = self.download(url, client, localfilename)
        #         tasks.append(task)
        #     await asyncio.gather(*tasks)
        # except:
        #     client.close()

    def run(self, urls):
        loop = asyncio.get_event_loop()
        tasks = [self.downloadURLFiles(url, Path(url).name)
                 for url in urls]
        return loop.run_until_complete(asyncio.gather(*tasks))
        # asyncio.set_event_loop(asyncio.new_event_loop())
        # loop = asyncio.get_event_loop()
        # loop.run_until_complete(self.downloadURLFiles(urls))


if __name__ == "__main__":
    serverFTP = 'ftp.star.nesdis.noaa.gov'
    yearrange = {"lower": 2010,
                 "higher": 2021}

    GOES_URLS = getUrlsFromFTP(
        serverFTP,
        yearrange=yearrange
    )

    Downloader = FTPFileDownloader(serverFTP)
    downloaded_files = Downloader.run(GOES_URLS)

Please note that servers generally impose a limited number of connections per user,
meaning there is a maximum number of concurrent connections and asynchronous downloads you can make.

One solution for that is to use a semaphore in your construction. See this page for details.

Your downloadURLFiles would then look like this:

async def downloadURLFiles(self, url, localfilename):
    async with sem:
        if sem.locked():
            print(f"MaxConnectionReached, WAITING...")
            await asyncio.sleep(1)
        async with aioftp.Client.context(self.serverFTP,
                                         # user=self.user,
                                         # password=self.password
                                         ) as client:
            local = await self.download(url, client, localfilename)
    return local

where sem is a semaphore with max concurrent connections, e.g., sem = asyncio.Semaphore(10).
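
For completeness, a minimal self-contained sketch of the pattern (the semaphore is created inside the running loop, which also avoids loop-binding errors on older Python versions; the limit of 10 is an assumed value):

import asyncio

async def main():
    sem = asyncio.Semaphore(10)

    async def fetch(i):
        async with sem:
            # at most 10 of these bodies run concurrently
            await asyncio.sleep(0.1)
            return i

    return await asyncio.gather(*(fetch(i) for i in range(25)))

print(asyncio.run(main()))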

Note that you also have md5 files in your list of urls. These are checksum files. You can use the hashlib library to compare the md5 digest of your downloaded file with that of the server. If they differ, your download was not successful.
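
For instance, a minimal sketch (md5_matches is a hypothetical helper; it assumes you have already read the expected hex digest out of the server’s .md5 file):

import hashlib

def md5_matches(localfilename, expected_hex_digest):
    # hash the downloaded file in chunks to avoid loading it into memory whole
    digest = hashlib.md5()
    with open(localfilename, "rb") as file:
        for chunk in iter(lambda: file.read(64 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_hex_digest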

Answered By: ermaure


After digging deep into the async packages used in the question’s script above, I finally managed to create a functional routine for downloading data from FTP servers.

Below, I present two approaches for downloading remote data asynchronously:

  1. Single-threaded
  2. Multi-threaded

Both approaches were only possible after addressing the following issues:

  1. Usage of the aioftp.pathio.AsyncPathIO class for writing the stream of data to a local file, instead of the builtin "open", which blocks the thread.
  2. Better understanding of the asyncio.gather(*tasks) and asyncio.get_event_loop().run_until_complete(future) methods.
  3. Usage of a pathlib.Path instance at the "aioftp.pathio.AsyncPathIO().open" method, isolated in the sketch after this list. From what I have observed, the aioftp documentation does not make it totally evident that one must pass a Path instance instead of a raw string (perhaps aioftp’s authors could reinforce this point in the package’s documentation).
  4. Better usage of the Semaphore to enforce a limit on concurrent downloads being executed at the "same time" against the remote server.
  5. Better usage of the loop instance.
  6. Usage of the third-party package "nest_asyncio". This is the only solution found so far for the "RuntimeError: This event loop is already running" problem.
  7. Avoidance of loop closures (e.g., asyncio.get_event_loop().close()). This case in particular is discussed elsewhere.
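
To illustrate point 3, here is the core write pattern in isolation (save_stream is a hypothetical name; client is assumed to be an already-connected aioftp.Client):

from pathlib import Path
import aioftp

async def save_stream(client, url, destination, chunk_size=1024 * 10):
    # the destination must be wrapped in a pathlib.Path,
    # not passed as a raw string
    toFileName = Path(destination)
    async with aioftp.pathio.AsyncPathIO().open(toFileName, mode="wb") as file:
        async with client.download_stream(url) as stream:
            async for block in stream.iter_by_block(chunk_size):
                await file.write(block)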

Single-Threaded Asynchronous Approach

Description

One will notice that the "Single-Threaded Script" example below shows different ways of retrieving the desired asynchronous results: a) through the loop instance; b) by awaiting the gathered future; c) by awaiting each coroutine directly. This way, I hope to show several possible solutions to the same problem.
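
For orientation, here is a minimal toy sketch of the three styles before the full script (work and demo are hypothetical names):

import asyncio

async def work(i):
    await asyncio.sleep(0.1)
    return i

async def demo():
    future = asyncio.gather(*(work(i) for i in range(3)))

    # a) through the loop instance (needs nest_asyncio if a loop is already running):
    # results = asyncio.get_event_loop().run_until_complete(future)

    # b) by awaiting the gathered future:
    results = await future

    # c) by awaiting a coroutine directly, one at a time:
    extra = await work(99)
    return results, extra

print(asyncio.run(demo()))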


Single-Threaded Asynchronous Script

import os
import asyncio
from pathlib import Path
import aioftp
import numpy as np
from ftplib import FTP


def get_parents(level=2):
    
    cwd = os.getcwd()
    
    for i in range(level):
        cwd = os.path.dirname(cwd)
    return cwd



def getUrlsFromFTP(serverFTP="",
                   remoteDirName="",
                   yearrange=None
                   ):
    
    if yearrange is None:
        yearrange = {"lower": 2010,
                     "higher": 2015}
    
    urlsToDownload = []
    ftp = FTP(serverFTP)
    ftp.login()

    ftp.cwd(remoteDirName)
    try:
        years = list(sorted([int(y) for y in ftp.nlst()]))

        years = [y for y in years if all([y >= yearrange["lower"],
                                          y < yearrange["higher"],
                                          ])
                 ]
        print("Years to be evaluated: ", years)

        def getfilesFromFTPFolder(url):
            ftp.cwd(url)
            ncfiles = ftp.nlst()

            ncfilesURLS = [os.path.join(url, f).replace("\\", "/")
                           for f in ncfiles]
            return ncfilesURLS

        sst_GOES_Annual_URLS = [os.path.join(remoteDirName,
                                             str(x)
                                             ) for x in years]
        urlsToDownload = list(
            map(lambda url: getfilesFromFTPFolder(url),
                sst_GOES_Annual_URLS)
        )

        urlsToDownload = np.concatenate(urlsToDownload).tolist()

        print("N° of images found: ", len(urlsToDownload))

    except Exception as err:
        print(err)
    
    finally:
        ftp.close()

    return urlsToDownload


class FTPFileDownloader:
    def __init__(self,
                 serverFTP: str,
                 remoteURLFolder="",
                 user="",
                 password="",
                 dirname=r"dataFolder",
                 urlsToDownload=(),  # immutable default avoids the mutable-default pitfall
                 maxDownloadsPerTime=4):
        self.serverFTP = serverFTP
        self.user = user
        self.password = password
        self.remoteURLFolder = remoteURLFolder
        self.chunk_size = 1024*10  # 10 KB
        os.makedirs(dirname, exist_ok=True)
        self.dirnameToSave = dirname
        print("Saving in: ", self.dirnameToSave)
        # an asyncio.Semaphore, so that waiting does not block the event loop
        # (on Python < 3.10, prefer creating it inside the running loop)
        self.downloadSemaphore = asyncio.Semaphore(maxDownloadsPerTime)
        self.downloadList = []
        for url in urlsToDownload:
            path = Path(url)
            localFileName = path.name
            toFileName = Path(os.path.join(self.dirnameToSave, localFileName))
            self.downloadList.append( (toFileName, url) )

    async def download(self,
                       client:aioftp.Client,
                       toFileName:Path,
                       url:str):
        
        # acquire the semaphore asynchronously; a blocking
        # threading.Semaphore here would deadlock the single thread
        async with self.downloadSemaphore:

            print("\t\t Downloading: ", toFileName.name)

            async with aioftp.pathio.AsyncPathIO().open(toFileName, mode="wb") as file:
                async with client.download_stream(url) as stream:
                    async for block in stream.iter_by_block(self.chunk_size):
                        await file.write(block)

    async def downloadURLFiles(self, useGatherWithTasks=True, useLoop=True):
        
        try:
            async with aioftp.Client.context(self.serverFTP,
                                             # user=self.user,
                                             # password=self.password
                                             ) as client:
                
                # The example below does not work
                if useGatherWithTasks:
                    print("\n\n", "-"*50, "\n")
                    print("Using Tasks")
                    print("\n", "-"*50, "\n")
                    tasks = []
                    for (toFileName, url) in self.downloadList:
                        
                        task = asyncio.create_task(self.download(client, toFileName, url))
                        tasks.append(task)
                        
                    future = asyncio.gather(*tasks)
                    
                    if useLoop:
                        print("nnt", "-"*40, "n")
                        print("t Using Loop")
                        print("nt", "-"*40, "n")
                        loop = asyncio.get_event_loop()
                        results = loop.run_until_complete(future)
                        # Não é possível fechar o loop devido a um
                        # erro sistêmico do Python: https://github.com/python/cpython/issues/77650
                        
                    else:
                        print("nnt", "-"*40, "n")
                        print("t No Loop")
                        print("nt", "-"*40, "n")
                        
                        results = await future
                    
                    return results

                else:
                    print("nn", "-"*50, "n")
                    print("NO TASKS: awaiting each Coroutine")
                    print("n", "-"*50, "n")
                    
                    for (toFileName, url) in self.downloadList:
                        
                        await self.download(client, toFileName, url)
                        

        except KeyboardInterrupt:
            pass
                
    def run(self, useGatherWithTasks=True, useLoop=True):
        
        asyncio.run(self.downloadURLFiles(useGatherWithTasks, useLoop))

        

def main(maxDownloadsPerTime = 4):
    serverFTP = 'ftp.star.nesdis.noaa.gov'
    
    urlsToDownload = getUrlsFromFTP(serverFTP,
                                    remoteDirName = r"/pub/sod/mecb/crw/data/5km/v3.1/nc/v1.0/daily/sst/",
                                    )[:12]

    dirname = os.path.join(get_parents(1),
                           "Downloads")

    Downloader = FTPFileDownloader(serverFTP,
                                   urlsToDownload=urlsToDownload,
                                   dirname=dirname,
                                   maxDownloadsPerTime=maxDownloadsPerTime)

    Downloader.run()
    

if __name__ == "__main__":
    import nest_asyncio
    nest_asyncio.apply()
    main()


Multi-Threaded Asynchronous Approach

Below is a multi-threaded asynchronous download example. One may notice that some options are similar to the single-threaded version; nevertheless, in this example I had to use a lambda function to wrap each coroutine into a thread’s target (a reduced sketch of the pattern follows). Furthermore, I opted for a manager-laborer approach, so I am using two classes: one that effectively downloads the remote data (urls), and another that manages the whole multi-threading configuration.
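
Reduced to a toy sketch (work is a hypothetical stand-in for the actual download coroutine), the thread-wrapping pattern looks like this:

import asyncio
from threading import Thread

async def work(name):
    await asyncio.sleep(0.1)
    print(name, "done")

threads = []
for i in range(3):
    coRoutine = work("Thread_{0}".format(i + 1))
    # the default argument freezes the binding of coRoutine, and
    # asyncio.run gives each thread its own event loop
    target = lambda coro=coRoutine: asyncio.run(coro)
    t = Thread(target=target, name="Thread_{0}".format(i + 1))
    t.start()
    threads.append(t)

for t in threads:
    t.join()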


Multi-Threaded Asynchronous Script


import os
import asyncio
from pathlib import Path
import aioftp
import numpy as np
from ftplib import FTP
from queue import Queue, Empty as QueueEmptyException
import threading
from threading import Semaphore, Thread
from os import getpid
import nest_asyncio
import concurrent


def get_parents(level=2):
    
    cwd = os.getcwd()
    for i in range(level):
        cwd = os.path.dirname(cwd)
    return cwd


def listToQueue(listOfUrls:list, dirnameToSave:str):
    QueueOfUrlsToDownload = Queue()

    for url in listOfUrls:
        path = Path(url)
        localFileName = path.name
        toFileName = Path(os.path.join(dirnameToSave, localFileName))
        QueueOfUrlsToDownload.put( (toFileName, url) )
    
    return QueueOfUrlsToDownload

def getUrlsFromFTP(serverFTP="",
                   remoteDirName="",
                   yearrange=None
                   ):
    
    if yearrange is None:
        yearrange = {"lower": 2010,
                     "higher": 2015}
    
    urlsToDownload = []
    ftp = FTP(serverFTP)
    ftp.login()

    ftp.cwd(remoteDirName)
    try:
        years = list(sorted([int(y) for y in ftp.nlst()]))

        years = [y for y in years if all([y >= yearrange["lower"],
                                          y < yearrange["higher"],
                                          ])
                 ]
        print("Years to be evaluated: ", years)

        def getfilesFromFTPFolder(url):
            ftp.cwd(url)
            ncfiles = ftp.nlst()

            ncfilesURLS = [os.path.join(url, f).replace("\\", "/")
                           for f in ncfiles]
            return ncfilesURLS

        sst_GOES_Annual_URLS = [os.path.join(remoteDirName,
                                             str(x)
                                             ) for x in years]
        urlsToDownload = list(
            map(lambda url: getfilesFromFTPFolder(url),
                sst_GOES_Annual_URLS)
        )

        urlsToDownload = np.concatenate(urlsToDownload).tolist()

        print("N° of images found: ", len(urlsToDownload))

    except Exception as err:
        print(err)
    
    finally:
        ftp.close()

    return urlsToDownload


class FTPFileDownloader:
    def __init__(self,
                 client: aioftp.Client,
                 user: str,
                 password: str,
                 dirnameToSave: str,
                 urlsToDownload: Queue,
                 semaphore: Semaphore):
        
        self.client = client
        self.user = user
        self.password = password
        self.chunk_size = 1024*10  # 10 KB
        os.makedirs(dirnameToSave, exist_ok=True)
        self.dirnameToSave = dirnameToSave
        self.downloadSemaphore = semaphore
        self.urlsToDownload = urlsToDownload

    async def download(self,
                       client:aioftp.Client,
                       toFileName:Path,
                       url:str):
        
        self.downloadSemaphore.acquire()
        try:
            currentThread = threading.current_thread()
            print("Process: {0} - Thread {1}".format(getpid(), currentThread.name) + 
                  " is Downloading: ", toFileName.name)
            
            if not os.path.exists(toFileName):
                
                async with aioftp.pathio.AsyncPathIO().open(toFileName, mode="wb") as file:
                    async with client.download_stream(url) as stream:
                        async for block in stream.iter_by_block(self.chunk_size):
                            await file.write(block)
                            
            else:
                pass
                 # print("File {0} already downloaded".format(toFileName))
        
        except aioftp.errors.StatusCodeError:
            print("Download aborted")
            # delete the partially downloaded file from the aborted transfer
            os.remove(toFileName)
        
        finally:
            self.downloadSemaphore.release()

    async def downloadURLFiles(self):
            
            try:
                while not self.urlsToDownload.empty():
                
                    QueueElement = self.urlsToDownload.get(block=False)
                    if QueueElement:
                        toFileName, url = QueueElement
                        await self.download(self.client, toFileName, url)
                        self.urlsToDownload.task_done()
                    
                    else:
                        break

            except QueueEmptyException:
                print('Queue empty')
                
            finally:
                self.urlsToDownload.put(None)

    async def run(self):
        
        try:
            await self.downloadURLFiles()
        
        except (KeyboardInterrupt, RuntimeError):
            pass


class multiThreadDownloadManager:
    def __init__(self,
                 semaphore: Semaphore,
                 maxDownloadsPerTime: int,
                 serverFTP = 'ftp.star.nesdis.noaa.gov',
                 remoteDirName = r"/pub/sod/mecb/crw/data/5km/v3.1/nc/v1.0/daily/sst/",
                 connectionsPerThread = 10,
                 maxThreadsPerTime=10):
        
        self.semaphore = semaphore
        self.maxDownloadsPerTime = maxDownloadsPerTime
        self.serverFTP = serverFTP
        self.remoteDirName = remoteDirName
        self.connectionsPerThread = connectionsPerThread
        self.maxThreadsPerTime = maxThreadsPerTime
        
        # Post Init
        self.urlsToDownload = getUrlsFromFTP(self.serverFTP,
                                        self.remoteDirName
                                        )
        self.dirnameToSave = os.path.join(get_parents(1),
                               "Downloads")
        
        self.semaphore = Semaphore(self.connectionsPerThread * self.maxThreadsPerTime)
        
        self.QueueOfUrlsToDownload = listToQueue(self.urlsToDownload,
                                                 self.dirnameToSave)
        
        self.client = aioftp.Client()

        print("Data will be saved in: ", self.dirnameToSave)


    async def _download(self):

        try:

            Downloader = FTPFileDownloader(client=self.client,
                                           user="anonimous",
                                           password="",
                                           dirnameToSave = self.dirnameToSave,
                                           urlsToDownload=self.QueueOfUrlsToDownload,
                                           semaphore=self.semaphore)
        
            await Downloader.run()
        
        except ConnectionResetError:
            print("Connection was lost")
            del Downloader

    
    async def download(self,
                       executor: concurrent.futures.ThreadPoolExecutor,
                       loop: asyncio.AbstractEventLoop):
        

        try:
            
            threads = []
            await self.client.connect(self.serverFTP)
            await self.client.login(# user=self.user, 
                                    # password=self.password
                                    )
            for i in range(self.maxThreadsPerTime):
                coRoutine = self._download()
                # bind the coroutine through a default argument (avoiding the
                # lambda late-binding pitfall) and let asyncio.run give each
                # thread its own event loop
                target = lambda coro=coRoutine: asyncio.run(coro)

                t = Thread(target=target,
                           name="Thread_{0}".format(i+1))
                t.start()
                threads.append(t)
                
            for t in threads:
                t.join()
                
                
        except ConnectionResetError:
            print("Connection was lost")
        
        finally:
            self.client.close()
    
   
def main(maxDownloadsPerTime: int=50,
         maxThreadsPerTime: int=10):
    
    semaphore = Semaphore(maxDownloadsPerTime * maxThreadsPerTime)
    manager = multiThreadDownloadManager(semaphore,
                                         maxDownloadsPerTime,
                                         maxThreadsPerTime=maxThreadsPerTime
                                         )
    
    loop = asyncio.get_event_loop()
    try:
        executor = concurrent.futures.ThreadPoolExecutor(max_workers=maxDownloadsPerTime)
        mainCoRoutine = manager.download(executor, loop)
        loop.run_until_complete(mainCoRoutine)
    
    except KeyboardInterrupt:
        pass
            
    # the worker threads were already joined inside manager.download,
    # so the download queue has been fully processed by this point
    
    try:
        loop.stop()
        loop.close()
    except RuntimeError:
        print("Cannot close a running Event Loop")
    
    print("The list of URLS have been fully downloaded")

if __name__ == "__main__":
    nest_asyncio.apply()
    main()


Acknowledgments

Finally, I would like to thank @Ermaure for their advice and guidance.

Sincerely,
