#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Sofian Brabez
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $FreeBSD: branches/pkg_install/Tools/scripts/getpatch 362667 2014-07-23 12:14:32Z sbz $
#
# MAINTAINER= sbz@FreeBSD.org

import argparse
import codecs
import re
import sys

if sys.version_info.major == 3:
    import urllib.request as urllib2
else:
    import urllib2

"""
FreeBSD getpatch handles Gnats and Bugzilla patch attachments
"""


class GetPatch(object):
    """Base class: subclasses implement fetch() to collect patch attachments."""

    def __init__(self, pr, category='ports'):
        self.pr = pr
        self.category = category
        self.patchs = []
        self.url = ""
        self.patch = ""
        self.output_stdout = False
        self.default_locale = sys.getdefaultencoding()

    def fetch(self, *largs, **kwargs):
        raise NotImplementedError()

    def write(self, filename, data):
        # normalize .patch/.txt attachment names to .diff
        if filename.endswith(('.patch', '.txt')):
            filename = filename[:filename.rindex('.')] + '.diff'
        f = codecs.open(filename, encoding=self.default_locale, mode='w')
        f.write(data.decode(self.default_locale))
        f.close()
        self.out("[+] %s created" % filename)

    def get(self, only_last=False, output_stdout=False):
        self.output_stdout = output_stdout
        self.fetch(self.pr, category=self.category)

        if len(self.patchs) == 0:
            self.out("[-] No patch found")
            sys.exit(1)

        if only_last:
            self.patchs = [self.patchs.pop()]

        for patch in self.patchs:
            url = patch['url']
            p = patch['name']
            data = urllib2.urlopen(url).read()
            if self.output_stdout:
                sys.stdout.write(data.decode(self.default_locale))
            else:
                self.write(p, data)

    def add_patch(self, url, name):
        self.patchs.append({'url': url, 'name': name})

    def out(self, s):
        # stay silent when the patch itself goes to stdout
        if not self.output_stdout:
            print(s)


class GnatsGetPatch(GetPatch):

    URL_BASE = 'https://www.freebsd.org/cgi'
    URL = '%s/query-pr.cgi?pr=' % URL_BASE
    # two groups: the attachment URL and the file name shown after "Download"
    REGEX = r'<a href="([^"]+)">Download ([^<]*)</a>'

    def __init__(self, pr, category):
        GetPatch.__init__(self, pr, category)

    def fetch(self, *largs, **kwargs):
        category = kwargs['category']

        target = "%s" % self.pr if category == '' else "%s/%s" % (category, self.pr)
        self.out("[+] Fetching patch for pr %s" % target)

        pattern = re.compile(self.REGEX)
        u = urllib2.urlopen(self.URL + '%s' % target)
        data = u.read()

        if data is None:
            self.out("[-] No patch found")
            sys.exit(1)

        for patchs in re.findall(pattern, str(data)):
            self.add_patch(patchs[0], patchs[1])


class BzGetPatch(GetPatch):

    URL_BASE = 'https://bugs.freebsd.org/bugzilla/'
    URL_SHOW = '%s/show_bug.cgi?id=' % URL_BASE
    # "Details" link of each attachment listed on the show_bug.cgi page
    REGEX_URL = r'<a href="(attachment\.cgi\?id=\d+&amp;action=edit)"[^>]*>Details</a>'
    # attachment details header, e.g. "foo.diff (text/plain; charset=us-ascii)"
    REGEX = r'<div class="details">([^ ]+) \(text/plain(?:; charset=[-\w]+)?\)'

    def __init__(self, pr, category):
        GetPatch.__init__(self, pr, category)

    def _get_patch_name(self, url):
        page = urllib2.urlopen(url).read().decode(self.default_locale)
        match = re.search(self.REGEX, page)
        if match is None:
            return None
        return match.group(1)

    def _get_patch_urls(self, data):
        patch_urls = {}
        for url in re.findall(self.REGEX_URL, data):
            # the href is HTML-escaped in the page source
            url = '%s/%s' % (self.URL_BASE, url.replace('&amp;', '&'))
            file_name = self._get_patch_name(url)
            if file_name is None:
                self.out("[-] Could not determine the patch file name in %s. "
                         "Skipping." % url)
                continue
            # strip the action parameter to get the raw download URL
            download_url = url[:url.find('&')]
            patch_urls[download_url] = file_name
        return patch_urls

    def fetch(self, *largs, **kwargs):
        category = kwargs['category']

        self.out("[+] Fetching patch for pr %s/%s" % (category, self.pr))

        u = urllib2.urlopen(self.URL_SHOW + '%s' % self.pr)
        data = u.read()

        if data is None:
            self.out("[-] No patch found")
            sys.exit(1)

        patch_urls = self._get_patch_urls(data.decode(self.default_locale))
        if not patch_urls:
            self.out("[-] No patch found")
            sys.exit(1)

        for url, file_name in patch_urls.items():
            self.add_patch(url, file_name)


def main():
    parser = argparse.ArgumentParser(
        description='Gets patch attachments from a Bug Tracking System'
    )
    parser.add_argument('pr', metavar='pr', type=str, nargs=1,
                        help='Pr id number')
    parser.add_argument('--mode', type=str, choices=['gnats', 'bz'],
                        default='bz', help='available modes to retrieve patch')
    parser.add_argument('--last', action='store_true',
                        help='only retrieve the latest iteration of a patch')
    parser.add_argument('--stdout', action='store_true',
                        help='dump patch on stdout')

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    category = ""
    pr = str(args.pr[0])
    # accept "category/id" style arguments and split out the category
    if '/' in pr:
        category, pr = pr.split('/')

    # GnatsGetPatch or BzGetPatch, picked from --mode
    Clazz = globals()['%sGetPatch' % args.mode.capitalize()]
    gp = Clazz(pr, category)
    gp.get(only_last=args.last, output_stdout=args.stdout)


if __name__ == '__main__':
    main()
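
# Usage sketch, assuming the script is installed as `getpatch`; the PR ids
# below are placeholders for illustration only, not real reports:
#
#   getpatch 186669                     # save every attachment of Bugzilla PR 186669
#   getpatch --last 186669              # keep only the most recent attachment
#   getpatch --stdout 186669 | less     # write the patch to stdout instead of a file
#   getpatch --mode gnats ports/166652  # query the legacy GNATS tracker instead of Bugzilla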