Download All Images from a Remote Directory: Python Script

Here’s a quick Python script you can use to download all the images from a remote directory, say www.example.com/dir1/. I have used BeautifulSoup to do the heavy lifting: it parses the directory listing page, and the script then walks every link on it and saves each file locally.


from BeautifulSoup import BeautifulSoup as bs
import urlparse
from urllib2 import urlopen
from urllib import urlretrieve
import os

def main(url, out_folder="D:/deckfinal/"):
    # Parse the directory listing page and walk every link on it
    soup = bs(urlopen(url))
    for link in soup.findAll("a"):
        href = link.get("href")
        if not href:
            continue
        # Resolve the link against the page URL and keep only the file name
        parsed = urlparse.urljoin(url, href)
        filename = href.split("/")[-1]
        outpath = os.path.join(out_folder, filename)
        try:
            urlretrieve(parsed, outpath)
        except IOError:
            print "skipping " + parsed

if __name__ == "__main__":
    main("http://www.example.com/dir1/images/", "D:/myimages")