about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Rafael G. Martins <rafael@rafaelmartins.eng.br>    2010-07-31 07:05:58 -0300
committer Rafael G. Martins <rafael@rafaelmartins.eng.br>    2010-07-31 07:05:58 -0300
commit    ccbd29b168159e73565888d6628ffc4475fe3ae6 (patch)
tree      8e679be57fc2d8b6dae7033a1ab05470fbd4e26a
parent    moved the package database tree to the octave-forge directory (diff)
download  g-octave-ccbd29b168159e73565888d6628ffc4475fe3ae6.tar.gz
          g-octave-ccbd29b168159e73565888d6628ffc4475fe3ae6.tar.bz2
          g-octave-ccbd29b168159e73565888d6628ffc4475fe3ae6.zip
rewritten g_octave/fetch.py; fixed other modules from g_octave/
-rw-r--r--  g_octave/config.py            |  11
-rw-r--r--  g_octave/description_tree.py  |  52
-rw-r--r--  g_octave/fetch.py             | 247
3 files changed, 128 insertions, 182 deletions
diff --git a/g_octave/config.py b/g_octave/config.py
index f8068fa..8749949 100644
--- a/g_octave/config.py
+++ b/g_octave/config.py
@@ -32,7 +32,7 @@ class Config(object):
'db': '/var/cache/g-octave',
'overlay': '/usr/local/portage/g-octave',
'categories': 'main,extra,language',
- 'db_mirror': 'http://soc.dev.gentoo.org/~rafaelmartins/g-octave/db/',
+ 'db_mirror': 'github://rafaelmartins/g-octave-db-test',
'trac_user': '',
'trac_passwd': '',
'pkg_cache': '',
@@ -79,13 +79,8 @@ class Config(object):
if not fetch_phase:
- # Cache (JSON)
- cache_file = os.path.join(_db, 'cache.json')
- with open(cache_file) as fp:
- self._cache = json.load(fp)
-
# JSON
- json_file = os.path.join(_db, self._cache['files']['info.json'])
+ json_file = os.path.join(_db, 'info.json')
with open(json_file) as fp:
self._info = json.load(fp)
@@ -96,8 +91,6 @@ class Config(object):
return self._getattr(attr)
elif attr in self._info:
return self._info[attr]
- elif attr == 'cache' and 'files' in self._cache:
- return self._cache['files']
else:
raise ConfigException('Invalid option: %s' % attr)
diff --git a/g_octave/description_tree.py b/g_octave/description_tree.py
index 3851b18..69da40d 100644
--- a/g_octave/description_tree.py
+++ b/g_octave/description_tree.py
@@ -19,6 +19,8 @@ __all__ = ['DescriptionTree']
import os
import re
+from portage.versions import vercmp
+
from .config import Config
from .description import *
from .exception import ConfigException, DescriptionTreeException
@@ -58,21 +60,24 @@ class DescriptionTree(object):
self.pkg_list[cat] = []
pkgs = os.listdir(catdir)
for pkg in pkgs:
- mypkg = re_pkg_atom.match(pkg)
- if mypkg == None:
- log.error('Invalid Atom: %s' % mypkg)
- raise DescriptionTreeException('Invalid Atom: %s' % mypkg)
- try:
- blacklist = conf.blacklist
- except ConfigException:
- # blacklist isn't mandatory
- blacklist = []
- if mypkg.group(1) not in blacklist or not parse_sysreq:
- self.categories[mypkg.group(1)] = cat
- self.pkg_list[cat].append({
- 'name': mypkg.group(1),
- 'version': mypkg.group(2),
- })
+ pkgdir = os.path.join(catdir, pkg)
+ for desc_file in os.listdir(pkgdir):
+ pkg_p = desc_file[:-len('.DESCRIPTION')]
+ mypkg = re_pkg_atom.match(pkg_p)
+ if mypkg == None:
+ log.error('Invalid Atom: %s' % mypkg)
+ raise DescriptionTreeException('Invalid Atom: %s' % mypkg)
+ try:
+ blacklist = conf.blacklist
+ except ConfigException:
+ # blacklist isn't mandatory
+ blacklist = []
+ if mypkg.group(1) not in blacklist or not parse_sysreq:
+ self.categories[mypkg.group(1)] = cat
+ self.pkg_list[cat].append({
+ 'name': mypkg.group(1),
+ 'version': mypkg.group(2),
+ })
def __getitem__(self, key):
@@ -90,8 +95,8 @@ class DescriptionTree(object):
pkgfile = os.path.join(
self._db_path,
cat,
- '%s-%s' % (pkg['name'], pkg['version']),
- 'DESCRIPTION'
+ pkg['name'],
+ '%s-%s.DESCRIPTION' % (pkg['name'], pkg['version']),
)
return Description(
pkgfile,
@@ -122,16 +127,11 @@ class DescriptionTree(object):
def version_compare(self, versions):
- max = ('0',)
- maxstr = None
-
+ max = '0'
for version in versions:
- tmp = tuple(version.split('.'))
- if tmp > max:
- max = tmp
- maxstr = version
-
- return maxstr
+ if vercmp(max, version) < 0:
+ max = version
+ return max
def packages(self):
diff --git a/g_octave/fetch.py b/g_octave/fetch.py
index f12974f..4883442 100644
--- a/g_octave/fetch.py
+++ b/g_octave/fetch.py
@@ -13,14 +13,9 @@
:license: GPL-2, see LICENSE for more details.
"""
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
-__all__ = [
- 'need_update',
- 'check_updates',
- 'download_files',
- 'check_db_cache',
-]
+__all__ = ['fetch']
from .config import Config
conf = Config(True) # fetch phase
@@ -32,151 +27,109 @@ if py3k:
import urllib.request as urllib
else:
import urllib2 as urllib
-import os
+
+import glob
import json
+import os
import re
import shutil
+import subprocess
+import sys
import tarfile
-import portage.output
from contextlib import closing
-out = portage.output.EOutput()
-
-re_files = {
- 'info.json': re.compile(r'info-([0-9]{10})-([0-9]+)\.json'),
- 'octave-forge.db.tar.gz': re.compile(r'octave-forge-([0-9]{10})\.db\.tar\.gz'),
- 'patches.tar.gz': re.compile(r'patches-([0-9]{10})-([0-9]+)\.tar\.gz'),
-}
-
-def need_update():
-
- return not os.path.exists(os.path.join(conf.db, 'update.json'))
-
-
-def check_updates():
-
- try:
- update = download_with_urllib2(
- conf.db_mirror + '/update.json',
- display_info=False
- ).decode('utf-8')
- except Exception as error:
- # if we already have a file, that's ok
- if need_update():
- raise FetchException(error)
- with open_(os.path.join(conf.db, 'update.json')) as fp:
- update = fp.read()
- else:
- with open_(os.path.join(conf.db, 'update.json'), 'w') as fp:
- fp.write(update)
-
- updated_files = json.loads(update)
-
- old_files = []
-
- for _file in updated_files['files']:
- if not os.path.exists(os.path.join(conf.db, _file)):
- old_files.append(_file)
-
- return old_files
-
-
-def download_files(files):
-
- for _file in files:
- download_with_urllib2(conf.db_mirror + '/' + _file, conf.db)
- add_file_to_db_cache(_file)
- extract(_file)
-
-
-def download_with_urllib2(url, dest=None, display_info=True):
-
- my_file = os.path.basename(url)
-
- if display_info:
- out.ebegin('Downloading: %s' % my_file)
- try:
- if dest != None:
- with closing(urllib.urlopen(url)) as fp:
- if not os.path.exists(dest):
- os.makedirs(dest, 0o755)
- with open(os.path.join(dest, my_file), 'wb') as fp_:
- shutil.copyfileobj(fp, fp_)
- else:
- with closing(urllib.urlopen(url)) as fp:
- if display_info:
- out.eend(0)
- return fp.read()
- except Exception as error:
- if display_info:
- out.eend(1)
- raise Exception('Failed to fetch the file (%s): %s' % (my_file, error))
- else:
- if display_info:
- out.eend(0)
-
-
-def add_file_to_db_cache(_file):
-
- my_file = os.path.join(conf.db, 'cache.json')
-
- try:
- with open_(my_file) as fp:
- files = json.load(fp)
- except:
- files = {'files': {}}
-
- for f in re_files:
- if re_files[f].match(_file) != None:
- files['files'][f] = _file
-
- with open_(my_file, 'w') as fp:
- json.dump(files, fp)
-
-
-def check_db_cache():
-
- try:
- with open_(os.path.join(conf.db, 'cache.json')) as fp:
- cache = json.load(fp)
- except:
- cache = {'files': {}}
-
- try:
- with open_(os.path.join(conf.db, 'update.json')) as fp:
- update = json.load(fp)
- except:
- my_cache = os.listdir(conf.db)
- update = {'files': []}
- for f in my_cache:
- for s in ['patches-', 'info-', 'octave-forge-']:
- if f.startswith(s) and f not in update['files']:
- update['files'].append(f)
-
- for _file in update['files']:
- if _file not in list(cache['files'].values()):
- my_file = os.path.join(conf.db, _file)
- if not os.path.exists(my_file):
- download_with_wget(conf.db_mirror + '/' + _file, my_file)
- add_file_to_db_cache(_file)
- extract(_file)
-
+def clean_db():
+ for f in ['info.json', 'patches', 'octave-forge']:
+ current = os.path.join(conf.db, f)
+ if os.path.isdir(current):
+ shutil.rmtree(current)
+ elif os.path.isfile(current):
+ os.unlink(current)
+
+class GitHub:
+
+ re_db_mirror = re.compile(r'github://(?P<user>[^/]+)/(?P<repo>[^/]+)/?')
+
+ def __init__(self, user, repo):
+ self.user = user
+ self.repo = repo
+ self.api_url = u'http://github.com/api/v2/json'
+ self.url = u'http://github.com'
+
+ def need_update(self):
+ return not os.path.exists(os.path.join(
+ conf.db, 'cache', 'commit_id'
+ ))
+
+ def get_commits(self, branch=u'master'):
+ url = '%s/commits/list/%s/%s/%s/' % (
+ self.api_url,
+ self.user,
+ self.repo,
+ branch
+ )
+ commits = {}
+ with closing(urllib.urlopen(url)) as fp:
+ commits = json.load(fp)
+ return commits['commits']
+
+ def fetch_db(self, branch='master'):
+ cache = os.path.join(conf.db, 'cache')
+ commit_id = os.path.join(cache, 'commit_id')
+ if not os.path.exists(cache):
+ os.makedirs(cache)
+ last_commit = self.get_commits()[0]['id']
+ if os.path.exists(commit_id):
+ with open_(commit_id) as fp:
+ if fp.read().strip() == last_commit:
+ return False
+ dest = os.path.join(cache, 'octave-forge-%s.tar.gz' % last_commit)
+ return_value = subprocess.call([
+ 'wget',
+ '--continue',
+ '--output-document', dest,
+ '%s/%s/%s/tarball/%s/' % (
+ self.url,
+ self.user,
+ self.repo,
+ branch
+ )
+ ])
+ if return_value == os.EX_OK:
+ with open_(os.path.join(cache, 'commit_id'), 'w') as fp:
+ fp.write(last_commit)
+ return True
+
+ def extract(self):
+ clean_db()
+ cache = os.path.join(conf.db, 'cache')
+ commit_id = os.path.join(cache, 'commit_id')
+ tarball = None
+ if os.path.exists(commit_id):
+ with open_(commit_id) as fp:
+ tarball = os.path.join(
+ cache,
+ 'octave-forge-%s.tar.gz' % fp.read().strip()
+ )
+ if tarball is not None:
+ if tarfile.is_tarfile(tarball):
+ with closing(tarfile.open(tarball, 'r')) as fp:
+ fp.extractall(conf.db)
+ dirs = glob.glob('%s/%s-%s*' % (conf.db, self.user, self.repo))
+ if len(dirs) != 1:
+ print('Failed to extract the tarball.', file=sys.stderr)
+ return
+ for f in os.listdir(dirs[0]):
+ shutil.move(os.path.join(dirs[0], f), conf.db)
+ os.rmdir(dirs[0])
+
+__modules__ = [
+ GitHub
+]
-def extract(gz_file, display_info=True):
-
- my_file = os.path.join(conf.db, gz_file)
-
- if tarfile.is_tarfile(my_file):
- if display_info:
- out.ebegin('Extracting: %s' % os.path.basename(gz_file))
- try:
- fp = tarfile.open(my_file, 'r:gz')
- fp.extractall(conf.db)
- except Exception as error:
- if display_info:
- out.eend(1)
- raise Exception('Failed to extract the file (%s): %s' % (my_file, error))
- else:
- if display_info:
- out.eend(0)
+def fetch():
+ for module in __modules__:
+ match = module.re_db_mirror.match(conf.db_mirror)
+ if match is not None:
+ return module(**match.groupdict())