火曜日, 1月 27, 2009

MinGW Cross Compilerのzipファイルの取得

MinGW Cross Compiler - The aim of this project is to provide and maintain MinGW (Minimalist GNU-Win32, http://www.mingw.org) cross-compiler package for Linux

Python 3.0でzipファイルを取得するソース:MinGWCC.py


# Python 3.0 Src 2009/01/27
# Retrieve zip files from MinGW Cross Compiler

import os
import io
import re
import urllib.request as request

# Site root prepended to the relative hrefs scraped from the listing page.
sBaseUrl = "http://sourceforge.net"


def getUrl(sUrl, sProxy = None):
    """Open *sUrl* and return the urllib response object.

    When *sProxy* is given (e.g. "http://host:8080") it is installed as
    the process-wide HTTP proxy, so later calls made without a proxy
    keep routing through it.
    """
    if sProxy is not None:
        # install_opener deliberately mutates global urllib state so the
        # proxy sticks for every subsequent getUrl() call.
        handler = request.ProxyHandler({"http" : sProxy})
        request.install_opener(request.build_opener(handler))
    return request.urlopen(sUrl)

def u2u(sUrl):
    """Unescape HTML ampersand entities in a scraped href.

    The listing HTML encodes "&" inside query strings as "&amp;"; this
    restores the literal "&" so the URL can be fetched. (The published
    blog copy showed replace("&", "&") — a no-op — because the blog
    software itself unescaped the entity; this is the intended code.)
    """
    return sUrl.replace("&amp;", "&")

def saveToFile(sPath, sText):
    """Write the bytes *sText* to *sPath* in binary mode.

    Uses a context manager so the file handle is closed even when the
    write raises; the original's explicit flush()/close() pair leaked
    the handle on error and is subsumed by the with-block.
    """
    with open(sPath, "wb") as fSave:
        fSave.write(sText)

def getLvl2FileName(sText):
    """Scan a file-listing page for *.zip anchor tags and download each.

    group(1) of every match is the href (download URL, unescaped twice
    to undo double-encoded entities) and group(2) is the zip file name,
    reused as the local save path.
    """
    oPat = re.compile('<a [^>]+ class="sfx_qalogger_element sfx_qalogger_clickable" href="(.+)" [^>]+>(.+\.zip)<\/a>')
    for oMatch in oPat.finditer(sText):
        sLvl3Url = u2u(u2u(oMatch.group(1)))
        print(" " + sLvl3Url)
        saveToFile(oMatch.group(2), getUrl(sLvl3Url).read())

if __name__ == "__main__":
    # Set the proxy URL (placeholder — edit for your environment).
    sProxy = "http://xxx.xxxxx.xxx:8080"
    sUrl = "http://sourceforge.net/project/showfiles.php?group_id=204414"
    sText = str(getUrl(sUrl, sProxy).read(), "utf-8")
    # Each "Download" cell links to a level-2 page listing the zips.
    oPat = re.compile('<td class="download"[^>]+><a href="(.+)">Download<\/a><\/td>')
    for oMatch in oPat.finditer(sText):
        sLvl2Url = u2u(sBaseUrl + oMatch.group(1))
        print(sLvl2Url)
        getLvl2FileName(str(getUrl(sLvl2Url).read(), "utf-8"))

0 件のコメント: