aboutsummaryrefslogtreecommitdiff
blob: ebb8bf9a3835e65281351fcac2dcef45681fbac4 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
#!/usr/bin/env python3
# -*- coding:utf-8 -*-

import csv
import argparse
import json


def import_LUT(src):
    """Load the lookup table from the JSON file at *src*.

    Returns the parsed JSON data — expected to be a nested dict mapping
    category -> pkg -> SLOT -> list of (groupId, artifactId) pairs,
    as consumed by lookup().
    """
    with open(src) as srcf:
        # json.load reads straight from the file object; no need to
        # buffer the whole file into a string first with read()+loads().
        return json.load(srcf)


def lookup(LUT, category, pkg, SLOT):
    """Return the list of (groupId, artifactId) pairs for a Gentoo package.

    Looks up LUT[category][pkg][SLOT].  Returns an empty list when any
    level of the path is missing, so callers can iterate the result
    unconditionally.
    """
    try:
        return LUT[category][pkg][SLOT]
    except (KeyError, TypeError):
        # KeyError: one of the keys is absent.
        # TypeError: an intermediate value is not a mapping.
        # The original bare `except:` also swallowed KeyboardInterrupt,
        # SystemExit and genuine bugs — keep the catch narrow.
        return []


# Command-line interface: all three options are mandatory — the source
# cache to read, the destination cache to write, and the Gentoo<->Maven
# lookup table.
parser = argparse.ArgumentParser(
    description='Lookup [groupId][artifactId] related to [category][pkg name] and fill a new cache')
for _flag, _dest, _help in (
        ('--src-cache', 'src', 'source cache file, with no (groupId, artifactId)'),
        ('--dst-cache', 'dst', 'the cache have (groupId, artifactId)'),
        ('--LUT', 'lut', 'the lookup table mapping Gentoo pkgs and Maven pkgs'),
):
    parser.add_argument(_flag, dest=_dest, action='store',
                        required=True, help=_help)

if __name__ == '__main__':
    args = parser.parse_args()

    # LUT: category -> pkg -> SLOT -> list of (groupId, artifactId) pairs.
    LUT = import_LUT(args.lut)

    # Context managers guarantee both files are closed (and the output
    # flushed) even on error; the originals leaked both handles.
    # newline='' is what the csv module documents for reader/writer
    # files, so lineterminator='\n' is written verbatim on all platforms.
    with open(args.src, 'r', newline='') as srcf, \
         open(args.dst, 'w', newline='') as dstf:
        old_cache = csv.reader(srcf, delimiter=':')
        new_cache = csv.writer(dstf, delimiter=':', lineterminator='\n')

        for line in old_cache:
            if len(line) == 1:
                # Line with no ':' separators (e.g. blank) — pass through.
                new_cache.writerow(line)
            elif line[0].startswith('#'):
                # Comment line — pass through unchanged.
                new_cache.writerow(line)
            elif line[5]:
                # It already has an equivalent groupId — keep as-is.
                new_cache.writerow(line)
            else:
                # Fill fields 5-6 with (groupId, artifactId) and copy the
                # value of field 2 into field 7, emitting one row per known
                # Maven equivalent.  Rows with no match produce no output
                # (original behavior).  NOTE(review): assumes data rows have
                # at least 8 fields — confirm against the cache format.
                for equiv_id in lookup(LUT, line[0], line[1], line[3]):
                    line[5:7] = equiv_id
                    line[7] = line[2]
                    new_cache.writerow(line)