Преглед изворни кода

Partial updates to make custom buildbot code work with latest Buildbot release (0.7.5)

[SVN r38474]
Rene Rivera пре 18 година
родитељ
комит
46338609a7

+ 77 - 0
tools/buildbot/config/master/alpha/master.cfg

@@ -0,0 +1,77 @@
+# Buildbot master configuration for the Boost "alpha" master (Buildbot 0.7.5).
+c = BuildmasterConfig = {}
+
+####### BUILDSLAVES
+
+# TCP port the build slaves connect to.
+c['slavePortnum'] = 9091
+
+# (slave name, password) pairs.
+c['bots'] = []
+c['bots'].append( ('linux-x86-rsi-droid', 'boost1234') )
+
+
+####### CHANGESOURCES
+
+from buildbot.changes.svnpoller import SVNPoller
+from buildbot.changes.pb import PBChangeSource
+
+# Poll Boost trunk for commits; also accept changes pushed over PB.
+c['sources'] = []
+c['sources'].append(SVNPoller(
+    'http://svn.boost.org/svn/boost/trunk'
+    ))
+c['sources'].append(PBChangeSource())
+
+
+####### SCHEDULERS
+
+from buildbot.scheduler import AnyBranchScheduler
+
+# Build once trunk has been quiet for 2 minutes.
+c['schedulers'] = []
+c['schedulers'].append(AnyBranchScheduler(
+    name = 'testing',
+    branches = ['trunk'],
+    treeStableTimer = 2*60,
+    builderNames = [
+        'Linux x86 Alpha'
+        ]
+    ))
+
+
+####### BUILDERS
+
+from boost.bot.factory import Boost_BuildFactory, action
+
+# Factory with a single svn-checkout action (see boost.bot.factory).
+factoryFull = Boost_BuildFactory(
+    action('svn',root='http://svn.boost.org/svn/boost/')
+    )
+
+c['builders'] = []
+c['builders'].append({
+    'name': 'Linux x86 Alpha',
+    'slavename': 'linux-x86-rsi-droid',
+    'builddir': 'Linux-x86-Alpha',
+    'factory': factoryFull })
+
+
+####### STATUS TARGETS
+
+from buildbot.status import html
+from buildbot.status import words
+
+# Waterfall web status on port 9090 plus an IRC status bot on #boost.
+c['status'] = []
+c['status'].append(html.Waterfall(
+    http_port=9090
+    ))
+c['status'].append(words.IRC(
+    host="irc.freenode.net",
+    nick="buildbot_alpha",
+    channels=["#boost"]
+    ))
+
+
+####### DEBUGGING OPTIONS
+
+
+####### PROJECT IDENTITY
+
+c['projectName'] = "Boost"
+c['projectURL'] = "http://boost.org/"
+c['buildbotURL'] = "http://droid.borg.redshift-software.com:9090/"

+ 9 - 0
tools/buildbot/src/boost/bot/__init__.py

@@ -0,0 +1,9 @@
+
+# Copyright Redshift Software, Inc. 2005-2007
+#
+# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or copy at 
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# SVN keyword-expanded metadata identifying this module's revision.
+modified = '$Date: 2007-05-09 10:49:32 -0500 (Wed, 09 May 2007) $'
+revision = '$Revision: 37651 $'

+ 19 - 0
tools/buildbot/src/boost/bot/char_translation_table.py

@@ -0,0 +1,19 @@
+
+# Copyright Redshift Software, Inc. 2005-2007
+#
+# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or copy at 
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import string
+
+def chr_or_question_mark( c ):
+    '''Return chr(c) if it is a printable 7-bit character (excluding tab,
+    vertical tab and form feed), otherwise '?'.'''
+    if chr(c) in string.printable and c < 128 and c not in ( 0x09, 0x0b, 0x0c ):
+        return chr(c)
+    else:
+        return '?'
+
+# 256-byte translation table mapping every non-printable/non-ASCII byte to
+# '?'; used to scrub test logs before merging them into result XML.
+char_translation_table = string.maketrans( 
+      ''.join( map( chr, range(0, 256) ) )
+    , ''.join( map( chr_or_question_mark, range(0, 256) ) )
+    )

+ 295 - 0
tools/buildbot/src/boost/bot/factory.py

@@ -0,0 +1,295 @@
+
+# Copyright Redshift Software, Inc. 2005-2007
+#
+# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or copy at 
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import boost.bot.step
+import buildbot
+import buildbot.process.base
+import buildbot.process.factory
+import buildbot.process.buildstep
+import buildbot.steps.source
+import os.path
+import re
+import string
+import time
+import twisted.python
+import types
+import urllib
+
+
+def action(_action,*_args,**_kwargs):
+    '''Bundle an action name with its arguments into the (name, args,
+    kwargs) tuple consumed by Boost_BuildFactory.newBuild.'''
+    _args = _args or []
+    _kwargs = _kwargs or {}
+    return (_action,_args,_kwargs)
+
+def defaults(_defaults = {},**_kwargs):
+    _defaults.update({
+        'haltOnFailure': _kwargs.get('haltOnFailure',False),
+        'flunkOnWarnings': _kwargs.get('flunkOnWarnings',False),
+        'flunkOnFailure': _kwargs.get('flunkOnFailure',True),
+        'warnOnWarnings': _kwargs.get('warnOnWarnings',False),
+        'warnOnFailure': _kwargs.get('warnOnFailure',False),
+        'timeout': _kwargs.get('timeout',30*60)
+        })
+    return _defaults
+
+def s(steptype, **kwargs):
+    '''Pair a step class with its constructor kwargs (the buildbot 0.7.x
+    step-specification tuple accepted by Build.setSteps).'''
+    return (steptype, kwargs)
+
+class Boost_BuildFactory(buildbot.process.factory.BuildFactory):
+    
+    def __init__(self, *actions, **args):
+        buildbot.process.factory.BuildFactory.__init__(self)
+        self.actions = actions or []
+        self.options = args or {}
+        #~ --
+        self.steps = []
+        self.treeStableTimer = 5*60
+        self.buildClass = Boost_Build
+    
+    def newBuild(self,request):
+        b = buildbot.process.factory.BuildFactory.newBuild(self,request)
+        b.setOptions(self.options)
+        steps = []
+        files = []
+        for (_action,_args,_kwargs) in self.actions:
+            action_call = getattr(self,'action_%s' % _action,None)
+            if callable(action_call):
+                for k in _kwargs.keys():
+                    if _kwargs[k] == None: del _kwargs[k]
+                _kwargs.update(self.options)
+                (action_steps,action_files) = action_call(b,*_args,**_kwargs)
+                steps = steps + action_steps
+                files = files + action_files
+        b.important_files = files
+        b.setSteps(steps)
+        return b
+    
+    def action_cvs(self,b,*args,**kwargs):
+        opt = {
+            'cvsmodule'         : kwargs.get('module',"boost"),
+            'global_options'    : ["-z9"],
+            'mode'              : kwargs.get('mode',"copy"),
+            'branch'            : kwargs.get('branch','HEAD'),
+            'cvsroot'           : kwargs.get('root')
+            }
+        if kwargs.has_key('passwd'):
+            opt['login'] = kwargs['passwd'] or ""
+        opt.update(defaults(**kwargs))
+        return (
+            [ s(buildbot.steps.source.CVS,**opt) ],
+            kwargs.get('files',[".*"]) )
+    
+    def action_svn(self,b,*args,**kwargs):
+        opt = {
+            'mode'              : kwargs.get('mode',"update"),
+            'defaultBranch'     : kwargs.get('branch','trunk'),
+            'baseURL'           : kwargs.get('root')
+            }
+        opt.update(defaults(**kwargs))
+        return (
+            [ s(buildbot.steps.source.SVN,**opt) ],
+            kwargs.get('files',[".*"]) )
+
+    def action_tarball(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Tarball
+                ,description = kwargs.get('description')
+                ,archive = kwargs.get('archive',b.workdir)
+                ,publishdir = kwargs['publishdir']
+                ,branch = kwargs.get('branch','HEAD')
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+
+    def action_selfupdate(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.SelfUpdate
+                ,description = kwargs.get('description')
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+    
+    def action_bjam_build(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Boost_Jam_Build
+                ,description = kwargs.get('description')
+                ,workdir = b.workdir
+                ,jam_src = kwargs.get('jam_src','tools/build/jam_src')
+                ,toolset = kwargs.get('toolset',None)
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+    
+    def action_bjam(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Boost_Jam
+                ,description = kwargs.get('description')
+                ,workdir = b.workdir
+                ,bjam = kwargs.get('bjam','tools/build/jam_src/bin/bjam')
+                ,project = kwargs.get('project','.')
+                ,options = kwargs.get('options',[])
+                ,target = kwargs.get('target','all')
+                ,locate = kwargs.get('locate','build')
+                ,env = kwargs.get('env',{})
+                ,logfile = kwargs.get('logfile',False)
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+    
+    def action_test_tools_build(self,b,*args,**kwargs):
+        return self.action_bjam( b
+            ,description = kwargs.get('description',['test tools','build'])
+            ,project = 'tools/regression/build'
+            ,options = [
+                '-sBUILD=release',
+                '-sTOOLS=%s' % kwargs['toolset']
+                ] + kwargs.get('options',[])
+            ,target = 'run'
+            ,locate = kwargs.get('locate','build')
+            ,env = kwargs.get('env',{})
+            ,**defaults(**kwargs)
+            )
+
+    def action_btest(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Boost_Test
+                ,description = kwargs.get('description')
+                ,workdir = b.workdir
+                ,tests = kwargs.get('tests',['.*'])
+                ,bjam = kwargs.get('bjam','tools/build/jam_src/bin/bjam')
+                ,project = kwargs.get('project','status')
+                ,options = kwargs.get('options',[
+                    '--dump-tests',
+                    '--dump-test-targets',
+                    '-sBUILD=%s' % kwargs.get('build','debug'),
+                    '-sTOOLS=%s' % kwargs['toolset']
+                    ] + kwargs.get('options',[]))
+                ,target = 'nothing'
+                ,locate = kwargs.get('locate','build')
+                ,env = kwargs.get('env',{})
+                ,logfile = kwargs.get('logfile','bjam.log')
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+
+    def action_btest_all(self,b,*args,**kwargs):
+        return self.action_bjam( b
+            ,description = kwargs.get('description',['btest','all'])
+            ,project = kwargs.get('project','status')
+            ,options = [
+                '--dump-tests',
+                '--dump-test-targets',
+                '-sBUILD=%s' % kwargs.get('build','debug'),
+                '-sTOOLS=%s' % kwargs['toolset']
+                ] + kwargs.get('options',[])
+            ,target = 'test'
+            ,locate = kwargs.get('locate','build')
+            ,env = kwargs.get('env',{})
+            ,logfile = kwargs.get('logfile','bjam.log')
+            ,files = kwargs.get('files',['boost.*','libs.*','status.*'])
+            ,**defaults(**kwargs)
+            )
+
+    def action_process_jam_log(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Boost_Process_Jam_Log
+                ,description = kwargs.get('description',['process log'])
+                ,workdir = b.workdir
+                ,projcess_jam_log = kwargs.get('projcess_jam_log','tools/regression/build/run/process_jam_log')
+                ,locate = kwargs.get('locate','build')
+                ,logfile = kwargs.get('logfile','bjam.log')
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+    
+    def action_collect_results(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Boost_Collect_Results
+                ,description = kwargs.get('description')
+                ,workdir = b.workdir
+                ,locate = kwargs.get('locate',b.options.get('locate','build'))
+                ,runner = kwargs['runner']
+                ,branch = kwargs['branch']
+                ,source_type = kwargs['source_type']
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+    
+    def action_publish_results(self,b,*args,**kwargs):
+        return (
+            [ s( boost.buildbot.step.Boost_Publish_Results
+                ,description = kwargs.get('description')
+                ,workdir = b.workdir
+                ,locate = kwargs.get('locate',b.options.get('locate','build'))
+                ,runner = kwargs['runner']
+                ,branch = kwargs['branch']
+                ,source_type = kwargs['source_type']
+                ,publish_location = kwargs['publish_location']
+                ,proxy = kwargs.get('proxy')
+                ,**defaults(**kwargs)
+                ) ],
+            kwargs.get('files',[]) )
+
+class Boost_Build(buildbot.process.base.Build):
+    '''Build that (a) decides change importance by matching changed file
+    names against regex patterns and (b) attaches a revision/time "stamp"
+    argument to each step as it is handed out.'''
+    
+    def __init__(self,requests):
+        buildbot.process.base.Build.__init__(self,requests)
+        # Pattern source strings (set by the factory); compiled lazily.
+        self.important_files = []
+        self.important_re = None
+    
+    def isFileImportant(self, filename):
+        # Returns 1 when any important-file regex matches filename, else 0.
+        if self.important_re == None:
+            self.important_re = []
+            for file in self.important_files:
+                self.important_re.append(re.compile(file))
+        for file_re in self.important_re:
+            if file_re.search(filename):
+                return 1;
+        return 0
+    
+    def setOptions(self,options = {}):
+        # Factory-wide options; 'workdir' defaults to 'build'.
+        self.options = options or {}
+        self.workdir = self.options.get('workdir','build')
+
+    def setupBuild(self, expectations):
+        #~ Hack the stamp as an allowed arg for steps.
+        if 'stamp' not in buildbot.process.buildstep.BuildStep.parms:
+            buildbot.process.buildstep.BuildStep.parms.append('stamp')
+        
+        return buildbot.process.base.Build.setupBuild(self,expectations)
+    
+    def getNextStep(self):
+        # Decorate the next step (and its pending remote command, if any)
+        # with the current stamp before returning it.
+        s = buildbot.process.base.Build.getNextStep(self)
+        if s:
+            #~ Add a stamp arg for the steps to use as needed.
+            stamp = self._get_stamp()
+            s.stamp = stamp
+            if hasattr(s,'cmd'):
+                if hasattr(s.cmd,'args'):
+                    s.cmd.args.update( { 'stamp' : stamp } )
+        return s
+    
+    def _get_stamp(self):
+        #~ The default is to use the revision sequence as the "time".
+        #~ If not available, because of a forced build for example, we 
+        #~ use the current time.
+        # NOTE(review): assumes getSourceStamp() returns a (revision, patch)
+        # pair -- confirm against the installed buildbot version.
+        stamp = time.strftime( '%Y-%m-%dT%H:%M:%S', time.gmtime() )
+        revision, patch = self.getSourceStamp()
+        if not revision:
+            changes = self.allChanges()
+            if changes:
+                last_change_time = max([c.when for c in changes])
+                last_change_revision = max([c.revision for c in changes])
+                #~ Prefer using the revision change if present. If it's not
+                #~ it's likely a CVS like time sequence, so use the time sequence
+                #~ int that case (adjusted with the tree timer).
+                # NOTE(review): self.treeStableTimer is assigned on the
+                # factory, not on this Build -- confirm it is reachable here.
+                if last_change_revision:
+                    stamp = last_change_revision
+                else:
+                    stamp = time.strftime( '%Y-%m-%dT%H:%M:%S',
+                        time.gmtime(last_change_time + self.treeStableTimer / 2) )
+        return stamp

+ 521 - 0
tools/buildbot/src/boost/bot/remote.py

@@ -0,0 +1,521 @@
+
+# Copyright Redshift Software, Inc. 2005-2007
+#
+# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or copy at 
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import boost.bot.char_translation_table
+import ftplib
+import platform
+import re
+import os
+import os.path
+import shutil
+import string
+import sys
+import tarfile
+import urlparse
+import xml.sax.saxutils
+import zipfile
+
+from buildbot.slave.commands import Command, AbandonChain, ShellCommand
+from buildbot.slave.registry import registerSlaveCommand
+from twisted.internet import reactor, defer
+from twisted.python import failure, log, runtime
+
+
+# Slave-command version string, extracted from the SVN $Revision$ keyword.
+_ver = '$Revision: 37651 $'[1+len("Revision: "):-2]
+
+class LoggedShellCommand(ShellCommand):
+    
+    def __init__(self, builder, command, workdir, **kwargs):
+        ShellCommand.__init__(self,builder,command,workdir
+            ,environ = kwargs.get('environ',{})
+            ,sendStdout = kwargs.get('sendStdout',True)
+            ,sendStderr = kwargs.get('sendStderr',True)
+            ,sendRC = kwargs.get('sendRC',True)
+            ,timeout = kwargs.get('timeout',None)
+            ,stdin = kwargs.get('stdin',None)
+            ,keepStdout = kwargs.get('keepStdout',False)
+            )
+        self.logfile = None
+        logfile = kwargs.get('logfile')
+        if logfile:
+            logdir = os.path.dirname(logfile)
+            if not os.path.exists(logdir):
+                os.makedirs(logdir)
+            if kwargs.get('appendToLog',False) and os.path.exists(logfile):
+                self.logfile = file(logfile,"a")
+            else:
+                self.logfile = file(logfile,"w")
+    
+    def addStdout(self, data):
+        ShellCommand.addStdout(self,data)
+        if self.logfile: self.logfile.write(data)
+    
+    def addStdout(self, data):
+        ShellCommand.addStdout(self,data)
+        if self.logfile: self.logfile.write(data)
+    
+    def finished(self, sig, rc):
+        if self.logfile: self.logfile.close()
+        ShellCommand.finished(self,sig,rc)
+
+def c(callback, *args, **kwargs):
+    args = args or []
+    kwargs = kwargs or {}
+    return (callback,args,kwargs)
+
+class NoOpCommand(Command):
+    '''Base slave command: runs a chain of c(...) callback tuples through a
+    Deferred, aborting the chain on interrupt or non-zero return code.'''
+
+    def start(self):
+        return self._start("noop",c(self.doNoOp))
+
+    def doNoOp(self):
+        self.stdout("do noop")
+        return 0
+
+    def stdout(self, message):
+        # Send one line of output to the master as a status update.
+        self.sendStatus({'stdout': message+"\n"})
+
+    def interrupt(self):
+        self.interrupted = True
+
+    def _start(self, name, *callbacks):
+        # Each callback is a (call, args, kwargs) tuple produced by c();
+        # they run in order, each followed by a result check.
+        d = defer.Deferred()
+        self.stdout("starting %s operation" % name)
+        self.name = name
+        self.command = None
+        for call,args,kwargs in callbacks:
+            d.addCallbacks(self._do_call,None,[call]+args,kwargs)
+            d.addCallback(self._result_check)
+        d.addCallbacks(self._success,self._failure)
+        reactor.callLater(2,d.callback,0)
+        return d
+
+    def _do_call(self, rc, call, *args, **kwargs):
+        return call(*args,**kwargs)
+    
+    def _result_check(self, rc):
+        # NOTE(review): self.interrupted is only assigned in interrupt();
+        # presumably buildbot's Command base initializes it to False --
+        # confirm, otherwise this raises AttributeError on the happy path.
+        if self.interrupted:
+            raise AbandonChain(-1)
+        if rc != 0:
+            raise AbandonChain(rc)
+        return 0
+
+    def _success(self, rc):
+        self.sendStatus({'rc': 0})
+        return None
+
+    def _failure(self, fail):
+        # Translate an AbandonChain into a final rc status for the master.
+        fail.trap(AbandonChain)
+        self.sendStatus({'rc': fail.value.args[0]})
+        return None
+
+registerSlaveCommand("noop", NoOpCommand, _ver)
+
+class SelfUpdateCommand(NoOpCommand):
+
+    def start(self):
+        return self._start("selfupdate",c(self.doUpdateCommandRegistry))
+    
+    def doUpdateCommandRegistry(self):
+        import buildbot.slave.registry
+        import buildbot.slave.commands
+        import boost.buildbot.remote
+
+        self.stdout("updating command registry")
+        reload(buildbot.slave.registry)
+        self.stdout("reloading standard commands")
+        reload(buildbot.slave.commands)
+        self.stdout("reloading boost commands")
+        reload(boost.buildbot.remote)
+        self.stdout("command registry update complete")
+
+        self.stdout("commands:")
+        for name, (factory, version) in buildbot.slave.registry.commandRegistry.items():
+            self.stdout("  %s (%s)" % (name,version))
+
+        return 0
+
+registerSlaveCommand("selfupdate", SelfUpdateCommand, _ver)
+
+class TarballCommand(NoOpCommand):
+
+    def start(self):
+        stamp = self.args.get('stamp','')
+        stamp = stamp.replace(' ','-')
+        stamp = stamp.replace(':','_')
+        archive_stamped = os.path.normpath(os.path.join(self.builder.basedir,
+            "%s-%s-%s" % (self.args['archive'],self.args.get('branch','X'),stamp)))
+        return self._start( "tarball",
+            c( self.doCleanRepository,
+                repository = os.path.normpath(os.path.join(self.builder.basedir, self.args['workdir'])) ),
+            c( self.doArchive,
+                source = os.path.normpath(os.path.join(self.builder.basedir, self.args['workdir'])),
+                archive = archive_stamped ),
+            c( self.doPublish,
+                archive = archive_stamped,
+                publishdir = os.path.normpath(self.args['publishdir']) ) )
+    
+    def doCleanRepository(self,*args,**kwargs):
+        
+        self.stdout("cleaning repository at %s..." % kwargs['repository'])
+        
+        self._clean_r(kwargs['repository'])
+        return 0
+    
+    def doArchive(self,*args,**kwargs):
+        source_path = kwargs['source']
+        archive_path = "%s.tar.bz2" % kwargs['archive']
+        archive_dir = os.path.basename( kwargs['archive'] )
+        
+        self.stdout("creating archive %s for %s" % ( archive_path, source_path ))
+        
+        previous_umask = os.umask(0022)
+        tar = tarfile.open(archive_path, 'w:bz2')
+        #~ Disabling posix allows for longer names and hence deeper directories. 
+        tar.Posix = False
+        tar.add(source_path, archive_dir)
+        tar.close()
+        os.umask(previous_umask)
+        
+        return 0
+    
+    def doPublish(self,*args,**kwargs):
+        archive_path = "%s.tar.bz2" % kwargs['archive']
+        
+        self.stdout("publishing archive %s to %s" % ( archive_path, kwargs['publishdir'] ))
+        
+        previous_umask = os.umask(0022)
+        try:
+            os.makedirs(kwargs['publishdir'],0755)
+        except:
+            pass
+        #~ shutil.move is available on py2.3, consider copy/rename implementation to
+        #~ support py2.2. Or possibly do an external async "mv" command.
+        shutil.move(archive_path,kwargs['publishdir'])
+        self._clean_archives( kwargs['publishdir'], '[^\.]+\.tar\.bz2',
+            ( os.path.basename(archive_path) ) )
+        os.umask(previous_umask)
+        return 0
+    
+    def _clean_r(self,dir):
+        names = os.listdir(dir)
+        names.sort()
+        for name in names:
+            entry = os.path.join(dir,name)
+            if name == 'CVS':
+                self.stdout("[REMOVE] %s" % entry)
+                shutil.rmtree( entry )
+            elif os.path.isdir(entry):
+                self._clean_r(entry)
+    
+    def _clean_archives(self,dir,m,exclude):
+        m_re = re.compile(m)
+        names = os.listdir(dir)
+        names.sort()
+        for name in names:
+            if m_re.search(name) and name not in exclude:
+                entry = os.path.join(dir,name)
+                self.stdout("[REMOVE] %s" % entry)
+                os.remove( entry )
+
+registerSlaveCommand("tarball", TarballCommand, _ver)
+
+class Command_Boost_Jam_Build(NoOpCommand):
+
+    def start(self):
+        return self._start( "boost.bjam.build",
+            c( self.doBJamBuild,
+                jam_src = os.path.normpath(os.path.join(
+                    self.builder.basedir, self.args['workdir'], self.args['jam_src'])),
+                toolset = self.args.get('toolset',None),
+                timeout = self.args.get('timeout',60*5))
+            )
+    
+    def doBJamBuild(self,*args,**kwargs):
+        self.stdout("building bjam at %s..." % kwargs['jam_src'])
+        if runtime.platformType != 'posix':
+            command = [ '.\build.bat' ]
+        else:
+            command = [ 'sh', './build.sh' ]
+        if kwargs['toolset']:
+            command.append(kwargs['toolset'])
+        self.command = ShellCommand(self.builder, command,
+            kwargs['jam_src'], { 'LOCATE_TARGET' : 'bin' },
+            sendRC = False, timeout = kwargs['timeout'] )
+        return self.command.start()
+
+registerSlaveCommand("boost.jam.build", Command_Boost_Jam_Build, _ver)
+
+class Command_Boost_Jam(NoOpCommand):
+    '''Slave command: run bjam for a project, teeing output into an
+    optional log file under ALL_LOCATE_TARGET.'''
+
+    def start(self):
+        _env = self.args.get('env',{})
+        # NOTE(review): this update mutates the dict held in self.args --
+        # presumably args are not reused across runs; confirm.
+        _env.update({
+            'ALL_LOCATE_TARGET': os.path.normpath(os.path.join(
+                self.builder.basedir,self.args.get('locate','build'))),
+            'BOOST_BUILD_PATH': "%s:%s:%s" % (
+                os.path.normpath(self.builder.basedir),
+                os.path.normpath(os.path.join(self.builder.basedir,'..')),
+                _env.get('BOOST_BUILD_PATH','.') )
+            })
+        _logfile = False
+        if self.args.get('logfile'):
+            _logfile = os.path.normpath(os.path.join(
+                _env['ALL_LOCATE_TARGET'],self.args['logfile']))
+        return self._start( "boost.bjam",
+            c( self.doBJam
+                ,bjam = os.path.normpath(os.path.join(self.builder.basedir,
+                    self.args['workdir'], self.args['bjam']))
+                ,project = os.path.normpath(os.path.join(self.builder.basedir, 
+                    self.args['workdir'], self.args.get('project','.')))
+                ,options = self.args.get('options',[])
+                ,target = self.args.get('target','all')
+                ,env = _env
+                ,logfile = _logfile
+                ,appendToLog = self.args.get('appendToLog',False)
+                ,timeout = self.args.get('timeout',60*5)
+                )
+            )
+    
+    def doBJam(self,*args,**kwargs):
+        '''Launch bjam via LoggedShellCommand. The merged os.environ copy
+        below is only printed for diagnostics; the command itself receives
+        kwargs['env'] as its environ.'''
+        self.stdout("bjam %s..." % kwargs['target'])
+        self.stdout("  env:")
+        env = os.environ.copy()
+        env.update(kwargs['env'])
+        for item in env.items():
+            self.stdout("    %s = '%s'" % item)
+        
+        command = [ kwargs['bjam'] ] + kwargs['options'] + [ kwargs['target'] ]
+        self.command = LoggedShellCommand(self.builder
+            ,command
+            ,kwargs['project']
+            ,environ = kwargs['env']
+            ,sendRC = False
+            ,timeout = kwargs['timeout']
+            ,logfile = kwargs['logfile']
+            ,appendToLog = kwargs['appendToLog']
+            )
+        return self.command.start()
+
+registerSlaveCommand("boost.jam", Command_Boost_Jam, _ver)
+
+class Command_Boost_ProcessJamLog(NoOpCommand):
+    '''Slave command: feed the bjam log through the process_jam_log tool to
+    generate per-test XML results under the locate directory.'''
+    
+    def start(self):
+        return self._start( "boost.process_jam_log"
+            ,c( self.doProcessJamLog
+                ,process_jam_log = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build'),
+                    self.args.get('process_jam_log','tools/regression/build/run/process_jam_log')))
+                ,boostroot = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('boostroot',self.args.get('workdir','.'))))
+                ,logfile = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build'),
+                    self.args.get('logfile','bjam.log')))
+                ,locate = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build')))
+                ,timeout = self.args.get('timeout',60*15)
+                )
+            )
+    
+    def doProcessJamLog(self,*args,**kwargs):
+        self.stdout("processing the regression test results...")
+        # process_jam_log reads the log on stdin; use the platform's native
+        # file-dump command ("type" on Windows, "cat" elsewhere) to pipe it.
+        if runtime.platformType != 'posix':
+            command = 'type "%s" | "%s" "%s"' % (kwargs['logfile'], kwargs['process_jam_log'], kwargs['locate'])
+        else:
+            command = 'cat "%s" | "%s" "%s"' % (kwargs['logfile'], kwargs['process_jam_log'], kwargs['locate'])
+        self.command = ShellCommand(self.builder
+            ,command
+            ,kwargs['boostroot']
+            ,timeout = kwargs['timeout']
+            )
+        return self.command.start()
+
+registerSlaveCommand("boost.process_jam_log", Command_Boost_ProcessJamLog, _ver)
+
+class Command_Boost_CollectResults(NoOpCommand):
+
+    def start(self):
+        return self._start( "boost.collect_results",
+            c( self.doCollectResults
+                ,results = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build'),
+                    '%s.xml' % self.args['runner']))
+                ,locate = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build')))
+                ,runner = self.args['runner']
+                ,timestamp = string.replace(self.args['stamp'],'T',' ')
+                ,tag = '%s-%s' % (self.args['source_type'],self.args['branch'])
+                ,source = self.args['source_type']
+                ,comments = self.args.get('comments',
+                    os.path.normpath(os.path.join(self.builder.basedir,'..','comments.html')))
+                ,platform = self.args.get('platform',platform.system())
+                ,timeout = self.args.get('timeout',60*15)
+                ),
+            c( self.doZipArchive
+                ,source = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build'),
+                    '%s.xml' % self.args['runner']))
+                ,archive = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build'),
+                    '%s.zip' % self.args['runner']))
+                ,timeout = self.args.get('timeout',60*15)
+                )
+            )
+    
+    def doCollectResults(self,*args,**kwargs):
+        self.stdout("collecting the regression test results...")
+        result = 0
+        previous_umask = os.umask(0022)
+        results_writer = open( kwargs['results'], 'w' )
+        self.stdout( 'Collecting test logs into "%s"...' % kwargs['results'] )
+        
+        results_xml = xml.sax.saxutils.XMLGenerator( results_writer )
+        results_xml.startDocument()
+        results_xml.startElement( 'test-run' ,{
+            'tag': kwargs['tag']
+            ,'platform': kwargs['platform']
+            ,'runner': kwargs['runner']
+            ,'timestamp': kwargs['timestamp']
+            ,'source': kwargs['source']
+            ,'run-type': 'incremental'
+            })
+        
+        self._copy_comments( results_xml, kwargs['comments'] )
+        self._collect_test_logs( [ kwargs['locate'] ], results_writer )
+    
+        results_xml.endElement( "test-run" )
+        results_xml.endDocument()
+        results_writer.close()
+        self.stdout( 'Done writing "%s".' % kwargs['results'] )
+        os.umask(previous_umask)
+        return result
+
+    def _copy_comments(self,results_xml,comment_file):
+        results_xml.startElement( 'comment', {} )
+    
+        if os.path.exists( comment_file ):
+            self.stdout( 'Reading comments file "%s"...' % comment_file )
+            f = open( comment_file, 'r' )
+            try:
+                results_xml.characters( f.read() )
+            finally:
+                f.close()    
+        else:
+            self.stdout( 'Warning: comment file "%s" is not found.' % comment_file )
+     
+        results_xml.endElement( 'comment' )
+
+    def _collect_test_logs(self,input_dirs,test_results_writer):
+        self.stdout( 'Collecting test logs ...' )
+        for input_dir in input_dirs:
+            self.stdout( 'Walking directory "%s" ...' % input_dir )
+            os.path.walk( input_dir, self._process_test_log_files, test_results_writer )
+
+    def _process_test_log_files(self,output_file,dir,names):
+        for file in names:
+            if os.path.basename( file ) == 'test_log.xml':
+                self._process_xml_file( os.path.join( dir, file ), output_file )
+
+    def _process_xml_file(self,input_file,output_file):
+        self.stdout( 'Processing test log "%s"' % input_file )
+        
+        f = open( input_file, 'r' )
+        xml = f.readlines()
+        f.close()
+        
+        for i in range( 0, len(xml)):
+            xml[i] = string.translate( xml[i], boost.buildbot.char_translation_table.char_translation_table )
+    
+        output_file.writelines( xml )
+    
+    def doZipArchive(self,*args,**kwargs):
+        source_path = kwargs['source']
+        archive_path = kwargs['archive']
+        self.stdout("creating archive %s for %s" % ( archive_path, source_path ))
+        result = 0
+        previous_umask = os.umask(0022)
+        try:
+            z = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED )
+            z.write( source_path, os.path.basename( source_path ) )
+            z.close()
+            self.stdout( 'Done writing "%s".'% archive_path )
+        except Exception, msg:
+            self.stdout( 'Warning: Compressing failed (%s)' % msg )
+            self.stdout( '         Trying to compress using a platform-specific tool...' )
+            try: import zip_cmd
+            except ImportError:
+                script_dir = os.path.dirname( os.path.abspath( sys.argv[0] ) )
+                self.stdout( 'Could not find \'zip_cmd\' module in the script directory (%s).' % script_dir )
+                result = -1
+            else:
+                if os.path.exists( archive_path ):
+                    os.unlink( archive_path )
+                    self.stdout( 'Removing stale "%s".' % archive_path )
+                    
+                zip_cmd.main( source_path, archive_path )
+                self.stdout( 'Done compressing "%s".' % archive_path )
+        os.umask(previous_umask)
+        return result
+
+registerSlaveCommand("boost.collect_results", Command_Boost_CollectResults, _ver)
+
+class Command_Boost_PublishResults(NoOpCommand):
+
+    def start(self):
+        return self._start( "boost.publish_results",
+            c( self.doPublish
+                ,source = os.path.normpath(os.path.join(
+                    self.builder.basedir,self.args.get('locate','build'),
+                    '%s.zip' % self.args['runner']))
+                ,target = '%s/%s-%s' % (self.args['publish_location'],self.args['source_type'],self.args['branch'])
+                ,proxy = self.args.get('proxy')
+                ,timeout = self.args.get('timeout',60*15)
+                )
+            )
+    
+    def doPublish(self,*args,**kwargs):
+        self.stdout("publishing the regression test results...")
+        result = 0
+        
+        (scheme,site,path,query,fragment) = urlparse.urlsplit(kwargs['target'])
+        publish_call = getattr(self,'_publish_%s' % scheme,None)
+        if callable(publish_call):
+            result = publish_call(scheme,site,path,query,fragment,**kwargs)
+        else:
+            self.stdout('unknown publish method "%s"' % scheme)
+            result = -1
+        
+        return result
+
+    def _publish_ftp(self,scheme,site,path,query,fragment,**kwargs):
+        self.stdout( 'Uploading log archive "%s" to %s' % ( kwargs['source'], kwargs['target'] ) )
+        
+        if not kwargs['proxy']:
+            ftp = ftplib.FTP( site )
+            ftp.set_debuglevel( 1 )
+            ftp.login()
+        else:
+            utils.log( '    Connecting through FTP proxy server "%s"' % kwargs['proxy'] )
+            ftp = ftplib.FTP( kwargs['proxy'] )
+            ftp.set_debuglevel( 1 )
+            ftp.set_pasv (0) # turn off PASV mode
+            ftp.login( 'anonymous@%s' % site, 'anonymous@' )
+        
+        ftp.cwd( os.path.dirname(path) )
+        try:
+            ftp.cwd( os.path.basename(path) )
+        except ftplib.error_perm:
+            ftp.mkd( os.path.basename(path) )
+            ftp.cwd( os.path.basename(path) )
+    
+        f = open( kwargs['source'], 'rb' )
+        ftp.storbinary( 'STOR %s' % os.path.basename( kwargs['source'] ), f )
+        ftp.quit()
+        return 0
+
+registerSlaveCommand("boost.publish_results", Command_Boost_PublishResults, _ver)

+ 124 - 0
tools/buildbot/src/boost/bot/step.py

@@ -0,0 +1,124 @@
+
+# Copyright Redshift Software, Inc. 2005-2007
+#
+# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or copy at 
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from buildbot.steps.shell import ShellCommand
+import re
+import string
+import twisted.python
+
+class command_base(ShellCommand):
+    """Common base for the Boost build steps.
+
+    Thin wrapper over buildbot's ShellCommand that lets each subclass
+    supply a default step name and description, either of which the
+    caller may override with the 'name'/'description' keyword arguments.
+    """
+    def __init__(self, _name, _description, **kwargs):
+        #~ Caller-supplied name/description take precedence over the
+        #~ subclass defaults.
+        if kwargs.get('name'): _name = kwargs.get('name')
+        if kwargs.get('description'): _description = kwargs.get('description')
+        
+        ShellCommand.__init__(self,**kwargs)
+        
+        #~ Assigned after the base __init__ so the values above win over
+        #~ whatever ShellCommand set up.
+        self.name = _name
+        self.description = _description
+        
+        #~ if kwargs.has_key('name'): del kwargs['name']
+        #~ if kwargs.has_key('description'): del kwargs['description']
+        #~ if kwargs.has_key('build'): del kwargs['build']
+        #~ self.cmd = buildbot.process.step.LoggedRemoteCommand(_name,kwargs)
+
+class SelfUpdate(command_base):
+    """Step 'selfupdate', described as "self update"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'selfupdate', ["self","update"], **kwargs)
+
+class Tarball(command_base):
+    """Step 'tarball', described as "tarball"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'tarball', ["tarball"], **kwargs)
+
+class Boost_Jam_Build(command_base):
+    """Step 'boost.jam.build', described as "bjam build"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'boost.jam.build', ["bjam","build"], **kwargs)
+
+class Boost_Jam(command_base):
+    """Step 'boost.jam', described as "bjam"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'boost.jam', ["bjam"], **kwargs)
+
+class Boost_Test(command_base):
+    """Step that runs the 'btest' action and then inserts one extra
+    build step per discovered test target.
+
+    The 'tests' keyword argument is a list of regular-expression strings
+    selecting which tests to expand; the remaining keyword arguments are
+    remembered and reused when constructing the follow-up Boost_Jam
+    steps.
+    """
+    def __init__(self, **kwargs):
+        #~ Pull out and remember the test selection patterns; they must
+        #~ not be passed through to the underlying shell command.
+        self.tests = kwargs.get('tests');
+        if kwargs.has_key('tests'): del kwargs['tests']
+        
+        self._kwargs = kwargs
+        
+        command_base.__init__(self, 'boost.jam', ["btest"], **kwargs)
+    
+    def commandComplete(self, cmd):
+        """Parse the command log for 'boost-test(TARGET)' lines and add
+        a Boost_Jam step per matching target, ahead of the remaining
+        steps of this build."""
+        
+        #~ Fold function: True once any selection regex matches the test
+        #~ name in parts[1] (used with reduce over test_re below).
+        def test_match(t,r):
+            return t or r.match(parts[1])
+        
+        #~ Get the log so we can parse it to find all the targets
+        #~ we can test.
+        out = cmd.log.getText()
+        lines = string.split(out,"\n")
+        test_targets = {}
+        test_re = []
+        for test in self.tests:
+            test_re.append(re.compile(test))
+        for line in lines:
+            #~ NOTE(review): the alternation below appears to strip the
+            #~ various quoting forms around the dumped target names --
+            #~ confirm against the bjam --dump-test-targets output.
+            parts = re.split('(?:" ")|(?:" ")|(?: ")|(?:" )|(?: [[]")|(?:"[]] )|(?:")',line)
+            if not parts: continue
+            if parts[0] != 'boost-test(TARGET)': continue
+            if not reduce(test_match,test_re,False): continue
+            try:
+                target_i = parts.index(':')+1
+            except:
+                continue
+            twisted.python.log.msg("Boost_Test.commandComplete: TEST = %s -- TARGETS = %s" %
+                (parts[1],string.join(parts[target_i:-1],' ')) )
+            #~ Keys of the dict act as a set of unique target names.
+            for t in parts[target_i:-1]:
+                test_targets[t] = True
+        test_targets = test_targets.keys()
+        test_targets.sort()
+        
+        #~ Construct new steps for each of the targets we want to test. It would be much
+        #~ better to tell bjam all targets to test in groups instead of one per invocation.
+        #~ But there's no "easy" way to do that. Passing in args can blow the command line
+        #~ limits. Setting an env can also blow that limit, but this may be a higher limit
+        #~ and we could do them piecemeal.
+        kwargs = self._kwargs.copy()
+        kwargs.update({
+            'flunkOnFailure': False,
+            'appendToLog': True
+            })
+        #~ The dump options were only needed for this discovery pass.
+        kwargs['options'].remove('--dump-tests')
+        kwargs['options'].remove('--dump-test-targets')
+        count = 0
+        for test_target in test_targets:
+            kwargs['target'] = test_target
+            step = Boost_Jam(**kwargs)
+            count += 1
+            step.name = "%s.%d" % (step.name,count)
+            #~ The steps up to our point have been eaten away already. So we
+            #~ can add to the front so that the additional steps get executed
+            #~ before the rest.
+            self.build.steps.insert(count-1,step)
+            self.build.build_status.addStep(step)
+        #~ Rearrange the steps on the build_status to match the order in the
+        #~ actual build.
+        #~ a: steps already finished, c: pre-existing steps displaced by
+        #~ the insert, b: the newly added steps; move b in front of c.
+        existing_count = len(self.build.steps)-count
+        new_count = count
+        a = self.build.build_status.steps[0:-new_count-existing_count]
+        c = self.build.build_status.steps[-new_count-existing_count:-new_count]
+        b = self.build.build_status.steps[-new_count:]
+        self.build.build_status.steps = a+b+c
+
+class Boost_Process_Jam_Log(command_base):
+    """Step 'boost.process_jam_log', described as "process log"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'boost.process_jam_log', ["process log"], **kwargs)
+
+class Boost_Collect_Results(command_base):
+    """Step 'boost.collect_results', described as "collect results"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'boost.collect_results', ["collect results"], **kwargs)
+
+class Boost_Publish_Results(command_base):
+    """Step 'boost.publish_results', described as "publish results"."""
+    def __init__(self, **kwargs):
+        command_base.__init__(self, 'boost.publish_results', ["publish results"], **kwargs)

+ 41 - 1
tools/buildbot/src/boost/patchwork.py

@@ -13,6 +13,8 @@ import zipimport
 import cStringIO
 import zipfile
 import re
+import stat
+import calendar
 
 
 class patchwork_module:
@@ -48,6 +50,9 @@ class patchwork_globals:
 
         #~ The packages to search for, in priority order.
         self.packages_to_search = None
+        
+        #~ The file info for each file in all the zip archives.
+        self.zipinfo = {}
 
 _g_ = patchwork_globals()
 
@@ -80,6 +85,8 @@ def def_modules(dir_and_file,packages):
         zip = module.zip
         files = zip.namelist()
         _g_.importers[path] = zipimport.zipimporter(path)
+        for zipinfo in zip.infolist():
+            _g_.zipinfo[os.path.join(path,zipinfo.filename)] = zipinfo
         for package in packages.keys():
             if os.path.exists(packages[package]):
                 #~ print "--| SRC FILE: %s" % (packages[package]);
@@ -115,6 +122,35 @@ def _file_(filename, mode = 'r', bufsize = -1):
     #~ print "--- patchwork.file(%s,%s,%d)\n" % (filename,mode,bufsize)
     return _open_(filename,mode,bufsize)
 
+def _stat_(filename):
+    """os.stat replacement aware of the registered zip archives.
+
+    For a path recorded in _g_.zipinfo a 10-tuple in os.stat order is
+    returned, describing a read-only regular file whose size and times
+    come from the zip entry. Any other path falls through to the real
+    os.stat.
+    """
+    #~ print "--- patchwork.stat(%s)\n" % (filename)
+    if filename in _g_.zipinfo:
+        st_size = _g_.zipinfo[filename].file_size
+        #~ NOTE(review): timegm interprets date_time as UTC, but zip
+        #~ entries store local time -- the result may be off by the
+        #~ archive creator's timezone offset; confirm acceptable.
+        st_mtime = calendar.timegm(_g_.zipinfo[filename].date_time)
+        return (
+            #~ st_mode: regular file, read-only (0444)
+            0100444,
+            #~ st_ino
+            0,
+            #~ st_dev
+            0,
+            #~ st_nlink
+            0,
+            #~ st_uid
+            0,
+            #~ st_gid
+            0,
+            #~ st_size
+            st_size,
+            #~ st_atime: archive members carry a single timestamp, so it
+            #~ is reused for atime and ctime as well.
+            st_mtime,
+            #~ st_mtime
+            st_mtime,
+            #~ st_ctime
+            st_mtime
+            )
+    return os.stat(filename)
+
 #~ Direct loader of modules, and packages, from other importers.
 class patchwork_loader:
     
@@ -145,6 +181,7 @@ if __builtin__.open == open:
     from boost.patchwork import _open_ as open
 if isinstance(file,type):
     from boost.patchwork import _file_ as file
+from boost.patchwork import _stat_ as stat
 '''
         code = compiler.compile(source,self.path,'exec')
         mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
@@ -153,7 +190,10 @@ if isinstance(file,type):
         if self.path.endswith("__init__.py"):
             mod.__path__ = [ os.path.join(self.importer.archive,os.path.dirname(self.path)) ]
         exec code in mod.__dict__
-        return mod
+        #~ We return the sys.modules entry instead of the mod variable directly
+        #~ because it's possible for the module itself to override the sys.modules
+        #~ entry with a custom one. For example, this is what Twisted 2.5 does.
+        return sys.modules[fullname]
 
 
 #~ Python 2.3 style importer that searches through our package patchwork set

+ 1 - 1
tools/buildbot/src/buildbot.py

@@ -13,7 +13,7 @@ import urllib
 from boost.patchwork import def_modules
 
 #~ The directory this file is in.
-root = os.path.abspath( os.path.dirname(__file__ ) )
+root = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
 
 #~ Download current packages.
 #~ if not os.path.isdir(os.path.join(root,'_packages')):

粤ICP备19079148号