path: root/rpmci
author     Colin Walters <walters@verbum.org>  2010-10-03 12:55:07 -0400
committer  Colin Walters <walters@verbum.org>  2010-10-03 12:55:07 -0400
commit     db8df4a040883b631ff3c719c453246df9085776 (patch)
tree       66e00f6726f6b0cdb612b00d069bba49fdd2bc0c /rpmci
parent     77ad7371c5906b83db49de309ba39d867b673975 (diff)
rpmci-vcs-mirror: Basically appears to work
Diffstat (limited to 'rpmci')
-rw-r--r--  rpmci/__init__.py                 0
-rw-r--r--  rpmci/async_subprocess.py        93
-rw-r--r--  rpmci/lame_vcs_abstraction.py   148
-rw-r--r--  rpmci/msgqueue.py                98
-rw-r--r--  rpmci/rpmci_vcs_mirror_main.py  239
-rw-r--r--  rpmci/spec.py                   264
6 files changed, 842 insertions, 0 deletions
diff --git a/rpmci/__init__.py b/rpmci/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/rpmci/__init__.py
diff --git a/rpmci/async_subprocess.py b/rpmci/async_subprocess.py
new file mode 100644
index 0000000..7988724
--- /dev/null
+++ b/rpmci/async_subprocess.py
@@ -0,0 +1,93 @@
+# async_subprocess.py:
+# Run a subprocess asynchronously using GLib
+#
+# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
+# Copyright (C) 2010 Red Hat, Inc.
+# Written by Colin Walters <walters@verbum.org>
+
+import os
+import logging
+
+import glib
+
+PIPE_STDOUT = 'pipe-stdout'
+
+def spawn_async_log_info(argv, cwd=None, stdout=None):
+ logging.info("Starting subprocess: %r" % (argv, ))
+ return AsyncProcess(argv, cwd=cwd, stdout=stdout)
+
+def spawn_async_output_to_file(argv, output_filepath, on_exited, cwd=None):
+ f = open(output_filepath, 'w')
+ process = AsyncProcess(argv, cwd=cwd, stdout=PIPE_STDOUT)
+ def _on_io(process, buf):
+ if buf is not None:
+ f.write(buf)
+ else:
+ f.close()
+ def _on_exited(process, condition):
+ on_exited(process, condition)
+ process.set_callbacks(_on_io, _on_exited)
+ return process
+
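+# A minimal usage sketch (the command, log path, and callback below are
+# illustrative assumptions, not part of this module):
+#
+#   def _log_done(process, condition):
+#       print "exited: %r" % (condition, )
+#   proc = spawn_async_output_to_file(['git', 'fetch'], '/tmp/fetch.log',
+#                                     _log_done, cwd='/srv/mirror/repo')
+#   glib.MainLoop().run()  # callbacks fire from the GLib main loop
+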
+class AsyncProcess(object):
+ def __init__(self, argv, cwd=None, stdout=None):
+ self.argv = argv
+ target_cwd = cwd or os.getcwd()
+
+ if stdout == PIPE_STDOUT:
+ (self.stdout, self._pipeout) = os.pipe()
+ (pid, stdin, stdout, stderr) = glib.spawn_async(argv,
+ working_directory=target_cwd,
+ child_setup=self._child_setup_pipes,
+ user_data=None,
+ flags=(glib.SPAWN_SEARCH_PATH
+ | glib.SPAWN_DO_NOT_REAP_CHILD))
+ os.close(self._pipeout)
+ else:
+ (pid, stdin, stdout, stderr) = glib.spawn_async(argv,
+ working_directory=target_cwd,
+ child_setup=None,
+ user_data=None,
+ flags=(glib.SPAWN_SEARCH_PATH
+ | glib.SPAWN_DO_NOT_REAP_CHILD
+ | glib.SPAWN_STDOUT_TO_DEV_NULL
+ | glib.SPAWN_STDERR_TO_DEV_NULL))
+ self.stdout = None
+ assert stdout is None
+ assert stderr is None
+
+ assert stdin is None
+ self.pid = pid
+ self._child_watch_id = glib.child_watch_add(pid, self._on_child_exited)
+ self._output_callback = None
+ self._exited_callback = None
+
+ def _child_setup_pipes(self, *args):
+ # Sadly we have to do a child setup function to get merged stdout/stderr
+ os.dup2(self._pipeout, 1)
+ os.dup2(self._pipeout, 2)
+
+ def set_callbacks(self, output_callback, exited_callback):
+ assert self._output_callback is None
+ assert self._exited_callback is None
+ self._output_callback = output_callback
+ self._exited_callback = exited_callback
+ if self.stdout is not None:
+ glib.io_add_watch(self.stdout, glib.IO_IN | glib.IO_ERR | glib.IO_HUP,
+ self._on_io)
+
+ def _on_child_exited(self, pid, condition):
+ logging.info("Child pid %d exited with code %r" % (pid, condition))
+ self._exited_callback(self, condition)
+
+ def _on_io(self, source, condition):
+ have_read = condition & glib.IO_IN
+ if have_read:
+ buf = os.read(source, 8192)
+ self._output_callback(self, buf)
+ if ((condition & glib.IO_ERR) > 0
+ or (condition & glib.IO_HUP) > 0):
+ self._output_callback(self, None)
+ os.close(source)
+ return False
+ return have_read
diff --git a/rpmci/lame_vcs_abstraction.py b/rpmci/lame_vcs_abstraction.py
new file mode 100644
index 0000000..a96f23d
--- /dev/null
+++ b/rpmci/lame_vcs_abstraction.py
@@ -0,0 +1,148 @@
+#!/usr/bin/python
+# lame_vcs_abstraction.py:
+#
+# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
+# Copyright (C) 2010 Red Hat, Inc.
+# Written by Colin Walters <walters@verbum.org>
+
+# Feel free to replace the bits here with something better...
+
+import os
+import sys
+import re
+import urlparse
+import getopt
+import subprocess
+import shutil
+import hashlib
+import logging
+
+from . import async_subprocess
+
+class Vcs(object):
+ def __init__(self, parsedurl, directory=None):
+ self._parsed_url = parsedurl
+ # Deliberately drop params/query
+ self._nonfragment_url_string = urlparse.urlunparse((parsedurl.scheme,
+ parsedurl.netloc,
+ parsedurl.path,
+ '', '', ''))
+ self._branch = self._parsed_url.fragment
+
+ self._dir = directory
+
+ def get_url(self):
+ return self._parsed_url
+
+ def get_base_url_string(self):
+ return self._nonfragment_url_string
+
+ def checkout_async(self, destdir):
+ """Retrieve a new copy of the source tree, saving as destdir"""
+ raise Exception("not implemented")
+
+ def set_directory(self, dirpath):
+ """Set the checkout directory."""
+ self._dir = dirpath
+
+ def get_directory(self):
+ return self._dir
+
+ def update_async(self):
+ """Update directory from the latest upstream"""
+ raise Exception("not implemented")
+
+    def export_archive(self, prefix, target_filename, logfile):
+ """Export a tarball with minimal (or no) version control data."""
+ raise Exception("not implemented")
+
+ def get_scheme(self):
+ return self._parsed_url.scheme
+
+ def get_id(self):
+ raise Exception("not implemented")
+
+ def get_abbreviated_id(self):
+ raise Exception("not implemented")
+
+ def _vcs_exec_sync_log_error(self, args):
+ logging.info("Synchronously executing: %r in cwd %r" % (args, self._dir))
+ return subprocess.check_output(args, stderr=subprocess.STDOUT, close_fds=True, cwd=self._dir)
+
+ def _vcs_exec_async(self, args, logfile_path, on_exited):
+ logging.info("Asynchronously executing: %r in cwd %r" % (args, self._dir))
+ return async_subprocess.spawn_async_output_to_file(args, logfile_path, on_exited, cwd=self._dir)
+
+ @classmethod
+ def new_from_spec(cls, spec):
+ """See http://maven.apache.org/scm/scm-url-format.html ; we use this format,
+ but without the "scm:" prefix."""
+ # Hack for backwards compatibility
+ if spec.startswith('git://'):
+ (vcstype, url) = ('git', spec)
+ else:
+ (vcstype, url) = spec.split(':', 1)
+ orig = urlparse.urlsplit(url)
+ # We want to support fragments, even if the URL type isn't recognized. So change the
+ # scheme to http temporarily.
+ temp = urlparse.urlunsplit(('http', orig.netloc, orig.path, orig.query, orig.fragment))
+ new = urlparse.urlsplit(temp)
+ combined = urlparse.SplitResult(orig.scheme, new.netloc, new.path, new.query, new.fragment)
+        if vcstype == 'git':
+            return GitVcs(combined)
+        raise ValueError("Unhandled VCS type %r in spec %r" % (vcstype, spec))
+
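+# A sketch of the accepted spec strings (the URL and branch name are
+# made-up examples):
+#
+#   vcs = Vcs.new_from_spec('git:http://example.com/project.git#maint')
+#   vcs.get_base_url_string()   # -> 'http://example.com/project.git'
+#   # ...the '#maint' fragment selects the branch to clone.
+#
+#   Vcs.new_from_spec('git://example.com/project.git')  # legacy shorthand
+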
+class GitVcs(Vcs):
+ vcstype = "git"
+
+ def checkout_async(self, destdir, logfile, on_exited):
+ assert self._dir is None
+ args = ['git', 'clone']
+ if self._branch:
+ args.extend(['-b', self._branch])
+ args.extend([self._nonfragment_url_string, destdir])
+ def _wrap_on_exited(process, condition):
+ if condition == 0:
+ self._dir = destdir
+ on_exited(process, condition)
+ return self._vcs_exec_async(args, logfile, _wrap_on_exited)
+
+ def update_async(self, logfile, on_exited):
+ assert self._dir is not None
+ return self._vcs_exec_async(['git', 'pull', '-r'], logfile, on_exited)
+
+ def export_archive(self, prefix, target_filename, logfile):
+ if not prefix.endswith('/'):
+ prefix += '/'
+ args = ['git', 'archive', '--format=tar', '--prefix=%s' % (prefix,), 'HEAD']
+ logging.info("Synchronously executing: %r" % (args, ))
+ log_f = open(logfile, 'w')
+        gitproc = subprocess.Popen(args, cwd=self._dir, stdout=subprocess.PIPE, stderr=log_f)
+ if target_filename.endswith('.bz2'):
+ zipbin = 'bzip2'
+ elif target_filename.endswith('.gz'):
+ zipbin = 'gzip'
+ else:
+ raise ValueError("Unknown compression for filename %r" % (target_filename,))
+ args = [zipbin, '-c']
+ logging.info("Synchronously executing: %r" % (args, ))
+ f = open(target_filename, 'w')
+        zipproc = subprocess.Popen(args, cwd=self._dir, stdout=f, stdin=gitproc.stdout, stderr=log_f)
+        gitproc.stdout.close()  # let gitproc see SIGPIPE if the compressor exits early
+        zipproc.wait()
+        gitproc.wait()
+
+ def get_commit_as_patch(self, commitid, destfile):
+ output = self._vcs_exec_sync_log_error(['git', 'format-patch', '--stdout', commitid + '^..' + commitid])
+ f = open(destfile, 'w')
+ f.write(output)
+ f.close()
+
+ def get_id(self):
+ output = self._vcs_exec_sync_log_error(['git', 'show', '--format=%H'])
+ return output.split('\n')[0]
+
+ def get_abbreviated_id(self):
+ full_id = self.get_id()
+ return full_id[0:8]
+
+ def get_commit_summary_as_filename(self, commitid):
+ output = self._vcs_exec_sync_log_error(['git', 'show', '--format=%f', commitid])
+ return output.split('\n')[0]
diff --git a/rpmci/msgqueue.py b/rpmci/msgqueue.py
new file mode 100644
index 0000000..7148773
--- /dev/null
+++ b/rpmci/msgqueue.py
@@ -0,0 +1,98 @@
+# msgqueue.py: Persistent message queue based on files
+#
+# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
+# Copyright (C) 2010 Red Hat, Inc.
+# Written by Colin Walters <walters@verbum.org>
+
+import os
+import sys
+
+import hashlib
+import json
+
+import gio
+
+class Message(object):
+ def __init__(self, ident, headers, payload):
+ self.headers = headers
+ self.payload = payload
+ if ident is None:
+ (self.ident, serialized) = self.serialize()
+ else:
+ self.ident = ident
+
+ def serialize(self):
+ serialized = json.dumps({'headers': self.headers,
+ 'payload': self.payload})
+ digest = hashlib.md5()
+ digest.update(serialized)
+ hexdigest = digest.hexdigest()
+ return (hexdigest, serialized)
+
+ def __cmp__(self, other):
+ return cmp(self.ident, other.ident)
+
+ @classmethod
+ def parse(cls, md5sum, stream):
+ contents = json.load(stream)
+ if 'headers' in contents:
+ headers = contents['headers']
+ else:
+ headers = {}
+ if 'payload' in contents:
+ payload = contents['payload']
+ else:
+            raise ValueError("Missing 'payload' in message %s" % (md5sum, ))
+ return cls(md5sum, headers, payload)
+
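+# A minimal usage sketch (the queue directory and handler below are
+# illustrative assumptions):
+#
+#   mq = MessageQueue('/srv/rpmci/msgqueue')
+#   def _on_messages(queue, messages):
+#       for msg in messages:
+#           print msg.ident, msg.payload
+#           queue.consume(msg)   # consumed messages are unlinked from disk
+#   mq.connect(_on_messages)
+#   mq.append(Message(None, {'type': 'update'}, {'id': 'abc123'}))
+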
+class MessageQueue(object):
+ def __init__(self, dirpath):
+ self._dirpath = dirpath
+ if not os.path.isdir(self._dirpath):
+ os.makedirs(self._dirpath)
+ self._dir_gfile = gio.File(path=dirpath)
+ self._monitor = self._dir_gfile.monitor(gio.FILE_MONITOR_NONE)
+ self._monitor.connect('changed', self._on_dir_changed)
+ self._subscribers = []
+ self._consumed = []
+
+ def connect(self, callback):
+ self._subscribers.append(callback)
+
+ def consume(self, message):
+ self._consumed.append(message)
+
+ def append(self, message):
+ (hexdigest, serialized) = message.serialize()
+ filename = os.path.join(self._dirpath, hexdigest)
+ temp_filename = os.path.join(self._dirpath, '_' + hexdigest)
+ f = open(temp_filename, 'w')
+ f.write(serialized)
+ f.close()
+ os.rename(temp_filename, filename)
+
+ def _on_dir_changed(self, mon, gfile, other, event):
+ dir_contents = os.listdir(self._dirpath)
+ messages = set()
+ for filename in dir_contents:
+ if filename.startswith('_'):
+ continue
+ file_path = os.path.join(self._dirpath, filename)
+ if not os.path.isfile(file_path):
+ continue
+ f = open(file_path)
+ message = Message.parse(filename, f)
+ f.close()
+ messages.add(message)
+
+ if len(messages) == 0:
+ return
+
+ self._consumed = []
+ for subscriber in self._subscribers:
+ subscriber(self, iter(messages))
+
+ for msg in self._consumed:
+ messages.remove(msg)
+ os.unlink(os.path.join(self._dirpath, msg.ident))
+ self._consumed = []
diff --git a/rpmci/rpmci_vcs_mirror_main.py b/rpmci/rpmci_vcs_mirror_main.py
new file mode 100644
index 0000000..85615a3
--- /dev/null
+++ b/rpmci/rpmci_vcs_mirror_main.py
@@ -0,0 +1,239 @@
+#!/usr/bin/python
+
+# rpmci_vcs_mirror_main.py:
+# Implementation of rpm-vcs-mirror
+#
+# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
+# Copyright (C) 2010 Red Hat, Inc.
+# Written by Colin Walters <walters@verbum.org>
+
+import os
+import sys
+import time
+import shutil
+import optparse
+from ConfigParser import SafeConfigParser
+import logging
+import urllib
+
+import glib
+import gobject
+import gio
+
+from . import msgqueue
+from . import lame_vcs_abstraction
+
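+# The [VCS] config section read below might look like this (an illustrative
+# sketch; the paths and values are assumptions, not shipped defaults):
+#
+#   [VCS]
+#   mirror_dir = %(home)s/rpmci/mirror
+#   process_logdir = %(home)s/rpmci/logs
+#   poll_seconds = 300
+#   max_host_concurrency = 2
+#   max_concurrency = 8
+#   msgqueue = %(home)s/rpmci/msgqueue
+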
+class VCSMirror(object):
+ def __init__(self, config, urls):
+ self.config = config
+ self.urls = urls
+
+ self._dir = config.get('VCS', 'mirror_dir')
+ self._process_dir = config.get('VCS', 'process_logdir')
+ if not os.path.isdir(self._process_dir):
+ os.makedirs(self._process_dir)
+ self._timeout_seconds = int(config.get('VCS', 'poll_seconds'))
+ self._max_host_concurrency = int(config.get('VCS', 'max_host_concurrency'))
+ self._max_concurrency = int(config.get('VCS', 'max_concurrency'))
+ msgqueue_dir = config.get('VCS', 'msgqueue')
+ self._msgqueue = msgqueue.MessageQueue(msgqueue_dir)
+
+ self._vcslist = []
+ for url in urls:
+ url = url.strip()
+ self._vcslist.append(lame_vcs_abstraction.Vcs.new_from_spec(url))
+
+ self._vcs_by_host = {}
+ for vcs in self._vcslist:
+ host = self._host_for_vcs(vcs)
+ if not host in self._vcs_by_host:
+ self._vcs_by_host[host] = []
+ self._vcs_by_host[host].append(vcs)
+
+ self._vcs_queue = []
+ self._jobs_by_host = {} # string host -> Job
+ self._active_queue_timeout_seconds = 0
+ self._active_queue_timeout_id = 0
+
+ def _host_for_vcs(self, vcs):
+ return vcs.get_url().netloc
+
+ def _escape_vcs_url(self, vcs):
+ return urllib.quote(vcs.get_base_url_string(), '')
+
+ def _cachedir_for_vcs(self, vcs):
+ return os.path.join(self._dir, self._escape_vcs_url(vcs))
+
+ def start(self):
+ logging.info("Starting poll of %d repositories on %d unique hosts"
+                     % (len(self._vcslist), len(self._vcs_by_host)))
+
+ # Schedule jobs for now
+ for vcs in self._vcslist:
+ self._vcs_queue.append((vcs, 0))
+ for host in self._vcs_by_host:
+ self._jobs_by_host[host] = []
+ self._active_queue_timeout_seconds = self._timeout_seconds
+ self._poll()
+
+ def _on_job_exited(self, process, condition):
+ vcs = process.__vcs
+ text = "VCS %r task %r exited with code %d" % (vcs, process, condition)
+ failed = condition != 0
+ if failed:
+ logging.warning(text)
+ else:
+ logging.info(text)
+ host = self._host_for_vcs(vcs)
+ jobs = self._jobs_by_host[host]
+ previous_id = None
+ index = -1
+ for i, (iter_process, previous_id) in enumerate(jobs):
+ if iter_process == process:
+ index = i
+ break
+ assert index >= 0
+        del jobs[index]
+
+ if not failed:
+ vcsdir = vcs.get_directory()
+ if vcsdir.endswith('.tmp'):
+ newname = vcsdir[:-4]
+ os.rename(vcsdir, newname)
+ vcs.set_directory(newname)
+ logging.info("vcs %r: Checkout complete in %r" % (vcs, newname))
+ current_id = vcs.get_id()
+ if current_id != previous_id:
+ logging.info("vcs %r: New commit id %r differs from previous %r" % (vcs, current_id, previous_id))
+ msg = msgqueue.Message(None, {'type': 'update'}, {'id': current_id})
+ self._msgqueue.append(msg)
+ else:
+ logging.info("No changes in %r from previous commit id %r" % (vcs, previous_id))
+
+ target_time = int(time.time() + self._timeout_seconds)
+ self._vcs_queue.append((vcs, target_time))
+
+ self._poll()
+
+ def _num_active_jobs(self):
+ active = 0
+ for host in self._jobs_by_host:
+ active += len(self._jobs_by_host[host])
+ return active
+
+ def _job_for_vcs(self, vcs):
+ for job in self._jobs_by_host[self._host_for_vcs(vcs)]:
+ (iter_process, previous_id) = job
+ iter_vcs = iter_process.__vcs
+ if iter_vcs == vcs:
+ return job
+ return None
+
+ def _adjust_timeout(self):
+ current_time = int(time.time())
+ timeout = self._active_queue_timeout_seconds
+ for (vcs, target_time) in self._vcs_queue:
+ time_delta = target_time - current_time
+ if time_delta > 0:
+ if time_delta < timeout:
+ timeout = time_delta
+ continue
+ if timeout < self._active_queue_timeout_seconds:
+ logging.info("Rescheduling poll for %d seconds" % (timeout, ))
+ if self._active_queue_timeout_id > 0:
+ glib.source_remove(self._active_queue_timeout_id)
+ self._active_queue_timeout_seconds = timeout
+ self._active_queue_timeout_id = glib.timeout_add_seconds(timeout, self._poll)
+
+
+ def _poll(self):
+ current_time = int(time.time())
+
+ logging.info("Doing poll (%d active tasks)" % (self._num_active_jobs(), ))
+ logging.debug("Queue: %r" % (self._vcs_queue, ))
+ processed = []
+ for vcs, target_time in self._vcs_queue:
+ active = self._job_for_vcs(vcs)
+ assert active is None
+
+ if target_time > current_time:
+ continue
+
+ host = self._host_for_vcs(vcs)
+ host_job_count = len(self._jobs_by_host[host])
+ if host_job_count >= self._max_host_concurrency:
+ logging.debug("Skipping job %r; at concurrency limit %d"
+ % (vcs, self._max_host_concurrency))
+ continue
+ else:
+ logging.debug("Have %d active jobs for host %r"
+ % (host_job_count, host))
+
+ vcsdir = self._cachedir_for_vcs(vcs)
+ job_logname = '%s-update-%s-%d.log' % (vcs.vcstype, self._escape_vcs_url(vcs),
+ int(time.time()), )
+ job_logpath = os.path.join(self._process_dir, job_logname)
+ if not os.path.isdir(vcsdir):
+ previous_id = None
+ logging.info("Doing initial checkout for %r" % (vcs.get_base_url_string(), ))
+ vcs_tempdir = vcsdir + '.tmp'
+ process = vcs.checkout_async(vcs_tempdir, job_logpath, self._on_job_exited)
+ else:
+ vcs.set_directory(vcsdir)
+ previous_id = vcs.get_id()
+ logging.info("Doing update for %r (starting from commit id %s)" % (vcs.get_base_url_string(), previous_id))
+ process = vcs.update_async(job_logpath, self._on_job_exited)
+ process.__vcs = vcs
+ self._jobs_by_host[host].append((process, previous_id))
+ processed.append(vcs)
+ while processed:
+ vcs = processed[0]
+ del processed[0]
+ index = -1
+ for i, (iter_vcs, target_time) in enumerate(self._vcs_queue):
+ if iter_vcs == vcs:
+ index = i
+ break
+ assert index >= 0
+ del self._vcs_queue[index]
+
+ self._adjust_timeout()
+
+ return False
+
+
+def main():
+ glib.threads_init()
+
+ opts = optparse.OptionParser("usage: %prog [options]")
+ opts.add_option('-c', '--config', dest='config', help="Path to configuration file")
+ opts.add_option('', '--debug', action='store_true', help="Print verbose debugging")
+
+ (options, args) = opts.parse_args()
+
+ if options.config is None:
+ print "Must specify --config"
+ sys.exit(1)
+
+ config = SafeConfigParser({'home': os.environ['HOME']})
+ config.read(options.config)
+ level = logging.DEBUG if options.debug else logging.INFO
+ logging.basicConfig(stream=sys.stderr, level=level)
+
+ mirrordir = config.get('VCS', 'mirror_dir')
+
+ vcslist_path = os.path.join(mirrordir, 'vcs.txt')
+
+ if not os.path.isfile(vcslist_path):
+ print "Missing mirror configuration file %r" % (vcslist_path, )
+ sys.exit(1)
+
+ f = open(vcslist_path)
+ urls = f.readlines()
+ f.close()
+
+ mirror = VCSMirror(config, urls)
+ mirror.start()
+
+ loop = glib.MainLoop()
+ loop.run()
diff --git a/rpmci/spec.py b/rpmci/spec.py
new file mode 100644
index 0000000..40ef6af
--- /dev/null
+++ b/rpmci/spec.py
@@ -0,0 +1,264 @@
+#!/usr/bin/python
+# spec.py: Read and write RPM .spec files
+#
+# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
+# Copyright (C) 2010 Red Hat, Inc.
+# Written by Colin Walters <walters@verbum.org>
+
+import os
+import sys
+import re
+import urlparse
+import getopt
+import subprocess
+import shutil
+import hashlib
+
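+# A typical editing session might look like this sketch (the filename and
+# values are illustrative assumptions):
+#
+#   spec = Spec('example.spec')
+#   spec.set_source('example-1.0', 'example-1.0.tar.bz2')
+#   spec.increment_release_snapshot('git20101003')
+#   spec.add_buildrequires(['gcc'])
+#   spec.save()
+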
+class Spec(object):
+ # These two constants were cribbed from rpm-spec-mode.el.
+ SECTIONS = ('%preamble', '%description', '%prep', '%setup',
+ '%build', '%install', '%check', '%clean',
+ '%changelog', '%files')
+ SCRIPTS = ('%pre', '%post', '%preun', '%postun',
+               '%trigger', '%triggerin', '%triggerprein',
+ '%triggerun', '%triggerpostun', '%pretrans',
+ '%posttrans')
+ def __init__(self, filename):
+ self._filename = filename
+ f = open(filename)
+ self._lines = self._read_lines_joining_backslash(f)
+ f.close()
+ self._saved = False
+ self._append_buildrequires = []
+ self._new_release = None
+ self._source_dirname = None
+ self._source_archivename = None
+ self._substitutions = []
+ # Map from section name (e.g. '%build') -> (list of functions)
+ self._section_filters = {}
+ self._added_patches = []
+
+ def _read_lines_joining_backslash(self, f):
+ lines = f.readlines()
+ concat_line = None
+ out_lines = []
+ for line in lines:
+ if line.endswith('\\\n'):
+ if concat_line is None:
+ concat_line = line[:-2]
+ else:
+ concat_line += line[:-2]
+            elif concat_line is not None:
+ out_lines.append(concat_line + line)
+ concat_line = None
+ else:
+ out_lines.append(line)
+        if concat_line is not None:
+ out_lines.append(concat_line + '\n')
+ return out_lines
+
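+    # For example (a sketch), the physical lines
+    #
+    #   BuildRequires: gcc \
+    #   make
+    #
+    # come back from the join above as the single logical line
+    # 'BuildRequires: gcc make\n'.
+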
+ def get_name(self):
+ return self._filename[:-5]
+
+ def add_buildrequires(self, new_buildrequires):
+ assert not self._saved
+ current_buildrequires = self.get_key_allvalues('BuildRequires')
+ new_buildrequires = filter(lambda x: x not in current_buildrequires, new_buildrequires)
+ self._append_buildrequires = new_buildrequires
+
+ def increment_release_snapshot(self, identifier):
+ assert not self._saved
+ cur_release = self.get_key('Release')
+ release_has_dist = cur_release.endswith('%{?dist}')
+ if release_has_dist:
+ cur_release = cur_release[:-8]
+ snapshot_release_re = re.compile(r'^([0-9]+)\.([0-9]+)\.')
+ numeric_re = re.compile(r'^([0-9]+)$')
+ match = snapshot_release_re.match(cur_release)
+ if match:
+ firstint = int(match.group(1))
+ relint = int(match.group(2)) + 1
+ new_release = '%d.%d.%s' % (firstint, relint, identifier)
+ else:
+ match = numeric_re.match(cur_release)
+ if not match:
+ raise ValueError("Can't handle Release value: %r" % (cur_release, ))
+ new_release = '%s.0.%s' % (cur_release, identifier)
+ if release_has_dist:
+ new_release += '%{?dist}'
+
+ self._new_release = new_release
+
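+    # Sketch of the transformation (the identifier 'git20101003' is a
+    # made-up example):
+    #
+    #   Release: 1%{?dist}        -> 1.0.git20101003%{?dist}
+    #   Release: 1.2.old%{?dist}  -> 1.3.git20101003%{?dist}
+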
+ def set_source(self, dirname, archivename):
+ assert not self._saved
+ self._source_dirname = dirname
+ self._source_archivename = archivename
+
+ def add_section_filter(self, name, function):
+ if not (name in self.SECTIONS or name in self.SCRIPTS):
+ raise KeyError("Invalid section name %r" % (name, ))
+ if name not in self._section_filters:
+ self._section_filters[name] = []
+ self._section_filters[name].append(function)
+
+ def _line_is_section(self, line):
+ for section in self.SECTIONS:
+ if line.startswith(section):
+ return True
+ for section in self.SCRIPTS:
+ if line.startswith(section):
+ return True
+ return False
+
+ def _get_range_for_section(self, name):
+ if not (name in self.SECTIONS or name in self.SCRIPTS):
+ raise KeyError("Invalid section name %r" % (name, ))
+ section_start = -1
+ section_end = -1
+ for i, line in enumerate(self._lines):
+ if section_start == -1 and line.startswith(name):
+ section_start = i
+ elif section_start >= 0:
+ if self._line_is_section(line):
+ section_end = i
+ break
+        if section_start >= 0 and section_end == -1:
+            section_end = len(self._lines) - 1
+ return (section_start, section_end)
+
+ def replace_key_line(self, key, new_value, line):
+ """Takes a line of the form "Release: 42 # foo" and replaces
+the 42 with new_value, preserving the comment # foo."""
+ comment = line.rfind('#')
+ if comment >= 0:
+ return '%s: %s %s\n' % (key, new_value, line[comment:])
+ else:
+ return '%s: %s\n' % (key, new_value)
+
+    def add_patch(self, filename):
+        # Numbering happens at save time, based on the highest existing
+        # Patch<n>: line; here we only record the filename.
+        self._added_patches.append(filename)
+
+ def save(self):
+ self._saved = True
+ tmpname = self._filename + '.tmp'
+ self.save_as(tmpname)
+ os.rename(tmpname, self._filename)
+
+ def save_as(self, new_filename):
+ wrote_buildrequires = False
+ output = open(new_filename, 'w')
+
+ apply_patchmeta_at_line = -1
+ apply_patch_apply_at_line = -1
+ source_re = re.compile(r'^Source([0-9]*):')
+ patch_re = re.compile(r'^Patch([0-9]+):')
+ apply_re = re.compile(r'^%patch')
+ highest_patchnum = -1
+
+ output_lines = self._lines
+
+ for i,line in enumerate(output_lines):
+ match = patch_re.search(line)
+ if match:
+ apply_patchmeta_at_line = i
+ highest_patchnum = int(match.group(1))
+ continue
+ match = source_re.search(line)
+ if match:
+ apply_patchmeta_at_line = i
+ if highest_patchnum == -1:
+ highest_patchnum = 0
+ continue
+ if line.startswith('%setup'):
+ apply_patch_apply_at_line = i + 1
+ continue
+ match = apply_re.search(line)
+ if match:
+ apply_patch_apply_at_line = i + 1
+ continue
+ if apply_patchmeta_at_line == -1:
+ print "Error: Couldn't determine where to add Patch:"
+ sys.exit(1)
+ if apply_patch_apply_at_line == -1:
+ print "Error: Couldn't determine where to add %patch"
+ sys.exit(1)
+
+ for section,filters in self._section_filters.iteritems():
+ (start, end) = self._get_range_for_section(section)
+ for i,line in enumerate(output_lines[start:end]):
+ for f in filters:
+ result = f(line)
+ if result is not None:
+ output_lines[start+i] = line = result
+
+ for i,line in enumerate(output_lines):
+ if i == apply_patchmeta_at_line:
+ for pnum,patch in enumerate(self._added_patches):
+ output.write('Patch%d: %s\n' % (highest_patchnum + pnum + 1, patch))
+ elif i == apply_patch_apply_at_line:
+ for pnum,patch in enumerate(self._added_patches):
+ output.write('%%patch%d -p1\n' % (highest_patchnum + pnum + 1, ))
+
+ if line.startswith('%setup') and self._source_dirname: # This is dumb, need to automate this in RPM
+ output.write('%%setup -q -n %s\n' % self._source_dirname)
+ elif ':' in line:
+ key, value = line.split(':', 1)
+ if key == 'Release' and self._new_release:
+ output.write(self.replace_key_line(key, self._new_release, line))
+ elif (line.startswith('Source0:') or line.startswith('Source:')) and self._source_archivename:
+ output.write(self.replace_key_line(key, self._source_archivename, line))
+ elif key == 'BuildRequires' and not wrote_buildrequires:
+ output.write(line)
+ for req in self._append_buildrequires:
+ output.write('BuildRequires: %s\n' % req)
+ wrote_buildrequires = True
+ else:
+ output.write(line)
+ else:
+ output.write(line)
+
+ output.close()
+
+ def get_patches(self):
+ patchre = re.compile(r'^Patch([0-9]+):')
+ patches = []
+ for line in self._lines:
+ match = patchre.search(line)
+ if not match:
+ continue
+ patches.append((int(match.group(1)), line.split(':', 1)[1].strip()))
+ return patches
+
+ def get_version(self):
+ return self.get_key('Version')
+
+ def get_vcs(self):
+ for line in self._lines:
+ if line.startswith('#VCS:'):
+ return line[5:].strip()
+ raise ValueError("No such key #VCS in file %r" % (self._filename, ))
+
+ def get_key(self, key):
+ key = key + ':'
+ for line in self._lines:
+ if line.startswith(key):
+ return line[len(key):].strip()
+ raise ValueError("No such key %r in file %r" % (key, self._filename))
+
+ def get_key_allvalues(self, key):
+ key = key + ':'
+ result = []
+ for line in self._lines:
+ if line.startswith(key):
+ result.append(line[len(key):].strip())
+ return result
+
+ def __str__(self):
+ return self._filename
+