1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
|
# vim: set sw=4 sts=4 et :
# Copyright: 2008 Gentoo Foundation
# Author(s): Nirbheek Chauhan <nirbheek.chauhan@gmail.com>
# License: GPL-2
#
# Immortal lh!
#
import os, sys, shutil
import os.path as osp
from autotua import fetch, const, sync, chroot, jobuild
class Jobs:
    """Interface to jobs on the master server that we can do"""

    def __init__(self):
        # Public key used to identify ourselves to the master server.
        self.pubkey = ''

    def getjobs(self):
        """
        Get a list of jobs
        (skeleton code atm)
        """
        # Build one Job per metadata dict published in const.job_list.
        return [Job(job_data) for job_data in const.job_list]
class Job:
    """A single job offered by the master server.

    Wraps the job metadata (maintainer, name, stage tarball, jobtage
    revision, atoms) and drives the fetch -> prepare -> run -> clean
    lifecycle inside a work chroot.
    """

    def __init__(self, job_data):
        # job_data: metadata dict from const.job_list / the master server.
        self.maint = job_data['maintainer']
        self.name = job_data['name']
        self.stage = self._stage_fetchable(job_data)
        self.jobdir = osp.join(const.WORKDIR, self.maint, self.name)
        self.jobtagerev = job_data['jobtagerev']
        self.atoms = job_data['atoms']
        self.jobuilds = []
        self.chroot = chroot.WorkChroot(self.jobdir, self.stage.filename)

    def __repr__(self):
        return '<%s: %s>' % (self.name, 'Job object')

    def __str__(self):
        return '%s object' % self.name

    def _generic_arch(self, arch):
        """
        Convert specific archs to generic archs
        i686 -> x86
        mips4 -> mips

        Exits the process for arches we cannot map.
        """
        if arch in ['alpha', 'amd64', 'ia64', 'x86']:
            return arch
        if arch == 'i686':
            return 'x86'
        elif arch == 'sparc64':
            return 'sparc'
        elif arch in ['hppa1.1', 'hppa2.0']:
            return 'hppa'
        elif arch.startswith('mips'):
            return 'mips'
        else:
            sys.exit('Invalid arch: '+arch+'\n')

    def _stage_fetchable(self, job_data):
        """Build a fetch.Fetchable for the job's stage tarball.

        A 'gentoo://' stage is expanded against every Gentoo mirror;
        anything else is assumed to already be a proper URL.
        """
        stage = job_data['stage']
        if stage.startswith('gentoo://'):
            # Strip the scheme; the remainder feeds the mirror path templates.
            job_data['stage'] = stage[9:]
            mirrors = const.GENTOO_MIRRORS
        else:
            # Assume it's a proper URL
            return fetch.Fetchable(uri=[stage])
        job_data['gen_arch'] = self._generic_arch(job_data['arch'])
        filename = const.STAGE_FILENAME % job_data
        uri = []
        for mirror in mirrors:
            mirror += "/" + const.STAGE_MIRROR_PATH % job_data
            mirror += "/" + filename
            uri.append(mirror)
        stage = fetch.Fetchable(uri=uri)
        return stage

    # - Get jobuild SRC_URI -> tarball dir
    def _fetch_src(self):
        """Fetch every jobuild's SRC_URI into the jobfile dir."""
        job_src = []
        fetcher = fetch.Fetcher(const.JOBFILE_DIR)
        # NOTE: loop variable renamed from 'jobuild' to avoid shadowing
        # the imported jobuild module.
        for build in self.jobuilds:
            src_uri = build.get_var('SRC_URI').split()
            for i in src_uri:
                job_src.append(fetch.Fetchable(uri=[i]))
        for i in job_src:
            # BUG FIX: i is already a Fetchable; pass it positionally as
            # fetch() does below — the old call re-wrapped it as uri=[i].
            fetcher.fetch(i)

    def fetch(self):
        """Fetch everything the job needs: stage tarball and jobtage tree."""
        # Job metadata stuff
        ## Get stage3 (if required)
        fetcher = fetch.Fetcher(const.STAGE_DIR)
        fetcher.fetch(self.stage)
        # Sync jobtage tree
        sync.Syncer().sync()
        # Export from local jobtage tree
        sync.Syncer(uri=const.JOBTAGE_DIR, destdir=osp.join(self.jobdir, 'jobtage'),
                    rev=self.jobtagerev, scheme="bzr-export").sync()
        ## Read config, get portage snapshot if required
        #self._fetch_portage_snapshot()

    def prepare(self):
        """Parse jobuilds, fetch their sources and set up the chroot."""
        # Create jobuild objects for parsing
        for atom in self.atoms:
            self.jobuilds.append(jobuild.Jobuild(self.jobdir, atom))
        self._fetch_src()
        self.chroot.setup()

    def run(self):
        """Run the job (skeleton code atm)."""
        # print() form is valid and identical in Python 2 and 3.
        print("Rheeet, it's running!~ NOT.")

    def clean(self):
        """Tear down the chroot and remove the job's work directories."""
        self.chroot.clean()
        shutil.rmtree(self.jobdir)
        # BUG FIX: os.removedirs() takes a single path argument; the old
        # two-argument call raised TypeError. Join the pieces (as in
        # __init__) so the now-empty maintainer dir (and any empty
        # parents) get pruned.
        os.removedirs(osp.join(const.WORKDIR, self.maint))
|