

from zope.interface import implements
from twisted.python import log
from twisted.persisted import styles
from twisted.internet import reactor, defer, threads
from twisted.protocols import basic
from buildbot.process.properties import Properties
from buildbot.util import collections
from buildbot.util.eventual import eventually

import weakref
import os, shutil, sys, re, urllib, itertools
import gc
from cPickle import load, dump
from cStringIO import StringIO
from bz2 import BZ2File
from gzip import GzipFile


from buildbot import interfaces, util, sourcestamp

SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY = range(6)
Results = ["success", "warnings", "failure", "skipped", "exception", "retry"]


STDOUT = interfaces.LOG_CHANNEL_STDOUT
STDERR = interfaces.LOG_CHANNEL_STDERR
HEADER = interfaces.LOG_CHANNEL_HEADER
ChunkTypes = ["stdout", "stderr", "header"]

    def __init__(self, chunk_cb, channels=[]):
        self.chunk_cb = chunk_cb
        self.channels = channels

        channel = int(line[0])
        if not self.channels or (channel in self.channels):
            self.chunk_cb((channel, line[1:]))

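# Illustrative sketch (not part of the original module): a chunk callback of
# the kind handed to the scanner above.  Chunks are (channel, text) tuples;
# the channel is one of the STDOUT/STDERR/HEADER constants defined above, and
# ChunkTypes maps it back to a name, e.g. ChunkTypes[STDERR] == "stderr".
def _example_print_chunk(chunk):
    channel, text = chunk
    log.msg("%s chunk of %d bytes" % (ChunkTypes[channel], len(text)))
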
61 """What's the plan?
62
63 the LogFile has just one FD, used for both reading and writing.
64 Each time you add an entry, fd.seek to the end and then write.
65
66 Each reader (i.e. Producer) keeps track of their own offset. The reader
67 starts by seeking to the start of the logfile, and reading forwards.
68 Between each hunk of file they yield chunks, so they must remember their
69 offset before yielding and re-seek back to that offset before reading
70 more data. When their read() returns EOF, they're finished with the first
71 phase of the reading (everything that's already been written to disk).
72
73 After EOF, the remaining data is entirely in the current entries list.
74 These entries are all of the same channel, so we can do one "".join and
75 obtain a single chunk to be sent to the listener. But since that involves
76 a yield, and more data might arrive after we give up control, we have to
77 subscribe them before yielding. We can't subscribe them any earlier,
78 otherwise they'd get data out of order.
79
80 We're using a generator in the first place so that the listener can
81 throttle us, which means they're pulling. But the subscription means
82 we're pushing. Really we're a Producer. In the first phase we can be
83 either a PullProducer or a PushProducer. In the second phase we're only a
84 PushProducer.
85
86 So the client gives a LogFileConsumer to File.subscribeConsumer . This
87 Consumer must have registerProducer(), unregisterProducer(), and
88 writeChunk(), and is just like a regular twisted.interfaces.IConsumer,
89 except that writeChunk() takes chunks (tuples of (channel,text)) instead
90 of the normal write() which takes just text. The LogFileConsumer is
91 allowed to call stopProducing, pauseProducing, and resumeProducing on the
92 producer instance it is given. """
93
94 paused = False
95 subscribed = False
96 BUFFERSIZE = 2048
97
99 self.logfile = logfile
100 self.consumer = consumer
101 self.chunkGenerator = self.getChunks()
102 consumer.registerProducer(self, True)
103
105 f = self.logfile.getFile()
106 offset = 0
107 chunks = []
108 p = LogFileScanner(chunks.append)
109 f.seek(offset)
110 data = f.read(self.BUFFERSIZE)
111 offset = f.tell()
112 while data:
113 p.dataReceived(data)
114 while chunks:
115 c = chunks.pop(0)
116 yield c
117 f.seek(offset)
118 data = f.read(self.BUFFERSIZE)
119 offset = f.tell()
120 del f
121
122
123 self.subscribed = True
124 self.logfile.watchers.append(self)
125 d = self.logfile.waitUntilFinished()
126
127
128 if self.logfile.runEntries:
129 channel = self.logfile.runEntries[0][0]
130 text = "".join([c[1] for c in self.logfile.runEntries])
131 yield (channel, text)
132
133
134
135
136
137 d.addCallback(self.logfileFinished)
138
140
141 self.paused = True
142 self.consumer = None
143 self.done()
144
151
154
156
157
158
159
160
161
162
163
164 eventually(self._resumeProducing)
165
167 self.paused = False
168 if not self.chunkGenerator:
169 return
170 try:
171 while not self.paused:
172 chunk = self.chunkGenerator.next()
173 self.consumer.writeChunk(chunk)
174
175
176 except StopIteration:
177
178 self.chunkGenerator = None
179
180
181
182 - def logChunk(self, build, step, logfile, channel, chunk):
183 if self.consumer:
184 self.consumer.writeChunk((channel, chunk))
185
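# Illustrative sketch (not part of the original module): a minimal consumer of
# the kind described in the docstring above.  It implements registerProducer(),
# unregisterProducer() and writeChunk(), and pauses the producer after every
# chunk, resuming from a later reactor turn, to show how a slow listener can
# throttle the producer.  The class name and delay are hypothetical.
class _ExampleThrottlingLogConsumer:
    def __init__(self, delay=0.1):
        self.delay = delay
        self.producer = None

    def registerProducer(self, producer, streaming):
        self.producer = producer

    def unregisterProducer(self):
        self.producer = None

    def writeChunk(self, chunk):
        # chunk is a (channel, text) tuple
        channel, text = chunk
        if self.producer:
            self.producer.pauseProducing()
            reactor.callLater(self.delay, self.producer.resumeProducing)
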

def _tryremove(filename, timeout, retries):
    """Try to remove a file; if that fails, try again after 'timeout' seconds.
    Each retry multiplies the timeout by 4, and at most 'retries' further
    attempts are made before giving up.
    """
    try:
        os.unlink(filename)
    except OSError:
        if retries > 0:
            reactor.callLater(timeout, _tryremove, filename, timeout * 4,
                              retries - 1)
        else:
            log.msg("giving up on removing %s after over %d seconds" %
                    (filename, timeout))

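# Usage sketch (hypothetical filename): retry removing a stale temp file,
# waiting 1, 4, 16, 64 and finally 256 seconds between attempts before giving
# up, which is how _cleanupFailedCompress below uses it:
#
#   _tryremove("12-log-compile-output.bz2.tmp", 1, 5)
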
210 """A LogFile keeps all of its contents on disk, in a non-pickle format to
211 which new entries can easily be appended. The file on disk has a name
212 like 12-log-compile-output, under the Builder's directory. The actual
213 filename is generated (before the LogFile is created) by
214 L{BuildStatus.generateLogfileName}.
215
216 Old LogFile pickles (which kept their contents in .entries) must be
217 upgraded. The L{BuilderStatus} is responsible for doing this, when it
218 loads the L{BuildStatus} into memory. The Build pickle is not modified,
219 so users who go from 0.6.5 back to 0.6.4 don't have to lose their
220 logs."""
221
222 implements(interfaces.IStatusLog, interfaces.ILogFile)
223
224 finished = False
225 length = 0
226 nonHeaderLength = 0
227 tailLength = 0
228 chunkSize = 10*1000
229 runLength = 0
230
231 logMaxSize = None
232
233 logMaxTailSize = None
234 maxLengthExceeded = False
235 runEntries = []
236 entries = None
237 BUFFERSIZE = 2048
238 filename = None
239 openfile = None
240 compressMethod = "bz2"
241
242 - def __init__(self, parent, name, logfilename):
243 """
244 @type parent: L{BuildStepStatus}
245 @param parent: the Step that this log is a part of
246 @type name: string
247 @param name: the name of this log, typically 'output'
248 @type logfilename: string
249 @param logfilename: the Builder-relative pathname for the saved entries
250 """
251 self.step = parent
252 self.name = name
253 self.filename = logfilename
254 fn = self.getFilename()
255 if os.path.exists(fn):
256
257
258
259
260 log.msg("Warning: Overwriting old serialized Build at %s" % fn)
261 dirname = os.path.dirname(fn)
262 if not os.path.exists(dirname):
263 os.makedirs(dirname)
264 self.openfile = open(fn, "w+")
265 self.runEntries = []
266 self.watchers = []
267 self.finishedWatchers = []
268 self.tailBuffer = []
269
272
273 - def hasContents(self):
274 return os.path.exists(self.getFilename() + '.bz2') or \
275 os.path.exists(self.getFilename() + '.gz') or \
276 os.path.exists(self.getFilename())
277
280
283
293
295 if self.openfile:
296
297
298 return self.openfile
299
300
301 try:
302 return BZ2File(self.getFilename() + ".bz2", "r")
303 except IOError:
304 pass
305 try:
306 return GzipFile(self.getFilename() + ".gz", "r")
307 except IOError:
308 pass
309 return open(self.getFilename(), "r")
310
312
313 return "".join(self.getChunks([STDOUT, STDERR], onlyText=True))
314
316 return "".join(self.getChunks(onlyText=True))
317
318 - def getChunks(self, channels=[], onlyText=False):
319
320
321
322
323
324
325
326
327
328
329
330
331 f = self.getFile()
332 if not self.finished:
333 offset = 0
334 f.seek(0, 2)
335 remaining = f.tell()
336 else:
337 offset = 0
338 remaining = None
339
340 leftover = None
341 if self.runEntries and (not channels or
342 (self.runEntries[0][0] in channels)):
343 leftover = (self.runEntries[0][0],
344 "".join([c[1] for c in self.runEntries]))
345
346
347
348 return self._generateChunks(f, offset, remaining, leftover,
349 channels, onlyText)
350
351 - def _generateChunks(self, f, offset, remaining, leftover,
352 channels, onlyText):
353 chunks = []
354 p = LogFileScanner(chunks.append, channels)
355 f.seek(offset)
356 if remaining is not None:
357 data = f.read(min(remaining, self.BUFFERSIZE))
358 remaining -= len(data)
359 else:
360 data = f.read(self.BUFFERSIZE)
361
362 offset = f.tell()
363 while data:
364 p.dataReceived(data)
365 while chunks:
366 channel, text = chunks.pop(0)
367 if onlyText:
368 yield text
369 else:
370 yield (channel, text)
371 f.seek(offset)
372 if remaining is not None:
373 data = f.read(min(remaining, self.BUFFERSIZE))
374 remaining -= len(data)
375 else:
376 data = f.read(self.BUFFERSIZE)
377 offset = f.tell()
378 del f
379
380 if leftover:
381 if onlyText:
382 yield leftover[1]
383 else:
384 yield leftover
385
387 """Return an iterator that produces newline-terminated lines,
388 excluding header chunks."""
389
390
391
392 alltext = "".join(self.getChunks([channel], onlyText=True))
393 io = StringIO(alltext)
394 return io.readlines()
395
405
409
413
414
415
435
436 - def addEntry(self, channel, text):
437 assert not self.finished
438
439 if isinstance(text, unicode):
440 text = text.encode('utf-8')
441 if channel != HEADER:
442
443 if self.logMaxSize and self.nonHeaderLength > self.logMaxSize:
444
445 if not self.maxLengthExceeded:
446 msg = "\nOutput exceeded %i bytes, remaining output has been truncated\n" % self.logMaxSize
447 self.addEntry(HEADER, msg)
448 self.merge()
449 self.maxLengthExceeded = True
450
451 if self.logMaxTailSize:
452
453 self.tailBuffer.append((channel, text))
454 self.tailLength += len(text)
455 while self.tailLength > self.logMaxTailSize:
456
457 c,t = self.tailBuffer.pop(0)
458 n = len(t)
459 self.tailLength -= n
460 assert self.tailLength >= 0
461 return
462
463 self.nonHeaderLength += len(text)
464
465
466
467 if self.runEntries and channel != self.runEntries[0][0]:
468 self.merge()
469 self.runEntries.append((channel, text))
470 self.runLength += len(text)
471 if self.runLength >= self.chunkSize:
472 self.merge()
473
474 for w in self.watchers:
475 w.logChunk(self.step.build, self.step, self, channel, text)
476 self.length += len(text)
477
484
511
512
514
515 if self.compressMethod == "bz2":
516 compressed = self.getFilename() + ".bz2.tmp"
517 elif self.compressMethod == "gz":
518 compressed = self.getFilename() + ".gz.tmp"
519 d = threads.deferToThread(self._compressLog, compressed)
520 d.addCallback(self._renameCompressedLog, compressed)
521 d.addErrback(self._cleanupFailedCompress, compressed)
522 return d
523
525 infile = self.getFile()
526 if self.compressMethod == "bz2":
527 cf = BZ2File(compressed, 'w')
528 elif self.compressMethod == "gz":
529 cf = GzipFile(compressed, 'w')
530 bufsize = 1024*1024
531 while True:
532 buf = infile.read(bufsize)
533 cf.write(buf)
534 if len(buf) < bufsize:
535 break
536 cf.close()
552 log.msg("failed to compress %s" % self.getFilename())
553 if os.path.exists(compressed):
554 _tryremove(compressed, 1, 5)
555 failure.trap()
556
557
559 d = self.__dict__.copy()
560 del d['step']
561 del d['watchers']
562 del d['finishedWatchers']
563 d['entries'] = []
564 if d.has_key('finished'):
565 del d['finished']
566 if d.has_key('openfile'):
567 del d['openfile']
568 return d
569
576
578 """Save our .entries to a new-style offline log file (if necessary),
579 and modify our in-memory representation to use it. The original
580 pickled LogFile (inside the pickled Build) won't be modified."""
581 self.filename = logfilename
582 if not os.path.exists(self.getFilename()):
583 self.openfile = open(self.getFilename(), "w")
584 self.finished = False
585 for channel,text in self.entries:
586 self.addEntry(channel, text)
587 self.finish()
588 del self.entries
589
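# Illustrative sketch (standalone, not part of the original module): the same
# buffered copy-and-compress loop that _compressLog uses above, applied to an
# arbitrary pair of hypothetical file names.  Reading at most 1MiB at a time
# keeps memory bounded even for very large logfiles.
def _example_compress_file(src, dst):
    infile = open(src, "rb")
    cf = BZ2File(dst, "w")
    bufsize = 1024 * 1024
    while True:
        buf = infile.read(bufsize)
        cf.write(buf)
        if len(buf) < bufsize:
            break
    cf.close()
    infile.close()
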
    implements(interfaces.IStatusLog)

    filename = None

    def __init__(self, parent, name, logfilename, html):

        return defer.succeed(self)

    def hasContents(self):

    def __getstate__(self):
        d = self.__dict__.copy()
        del d['step']
        return d



    implements(interfaces.IBuildSetStatus)

        (external_idstring, reason, ssid, complete, results) = self._get_info()
        return external_idstring

        (external_idstring, reason, ssid, complete, results) = self._get_info()
        return complete

        return self.status._buildset_waitUntilSuccess(self.id)

        return self.status._buildset_waitUntilFinished(self.id)

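# Illustrative sketch (not part of the original module): waiting for a build
# set to complete.  'bss' stands for any BuildSetStatus instance; the public
# waitUntilFinished() accessor is assumed to wrap _buildset_waitUntilFinished()
# above and to return a Deferred that fires with the BuildSetStatus once every
# build in the set has finished.
def _example_wait_for_buildset(bss):
    d = bss.waitUntilFinished()
    def _done(finished_bss):
        log.msg("buildset finished")
        return finished_bss
    d.addCallback(_done)
    return d
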

    """
    I represent a collection of output status for a
    L{buildbot.process.step.BuildStep}.

    Statistics contain any information gleaned from a step that is
    not in the form of a logfile. As an example, steps that run
    tests might gather statistics about the number of passed, failed,
    or skipped tests.

    @type progress: L{buildbot.status.progress.StepProgress}
    @cvar progress: tracks ETA for the step
    @type text: list of strings
    @cvar text: list of short texts that describe the command and its status
    @type text2: list of strings
    @cvar text2: list of short texts added to the overall build description
    @type logs: dict of string -> L{buildbot.status.builder.LogFile}
    @ivar logs: logs of steps
    @type statistics: dict
    @ivar statistics: results from running this step
    """

    implements(interfaces.IBuildStepStatus, interfaces.IStatusEvent)
    persistenceVersion = 2

    started = None
    finished = None
    progress = None
    text = []
    results = (None, [])
    text2 = []
    watchers = []
    updates = {}
    finishedWatchers = []
    statistics = {}

    def getName(self):
        """Returns a short string with the name of this step. This string
        may have spaces in it."""
        return self.name

    def getExpectations(self):
        """Returns a list of tuples (name, current, target)."""
        if not self.progress:
            return []
        ret = []
        metrics = self.progress.progress.keys()
        metrics.sort()
        for m in metrics:
            t = (m, self.progress.progress[m], self.progress.expectations[m])
            ret.append(t)
        return ret

    def getURLs(self):
        return self.urls.copy()

    def isStarted(self):
        return (self.started is not None)

    def getText(self):
        """Returns a list of strings which describe the step. These are
        intended to be displayed in a narrow column. If more space is
        available, the caller should join them together with spaces before
        presenting them to the user."""
        return self.text

    def getResults(self):
        """Return a tuple describing the results of the step.
        'result' is one of the constants in L{buildbot.status.builder}:
        SUCCESS, WARNINGS, FAILURE, or SKIPPED.
        'strings' is an optional list of strings that the step wants to
        append to the overall build's results. These strings are usually
        more terse than the ones returned by getText(): in particular,
        successful Steps do not usually contribute any text to the
        overall build.

        @rtype: tuple of int, list of strings
        @returns: (result, strings)
        """
        return (self.results, self.text2)

    def hasStatistic(self, name):
        """Return true if this step has a value for the given statistic.
        """
        return self.statistics.has_key(name)

    def getStatistic(self, name, default=None):
        """Return the given statistic, if present
        """
        return self.statistics.get(name, default)

    def subscribe(self, receiver, updateInterval=10):

    def setColor(self, color):
        log.msg("BuildStepStatus.setColor is no longer supported -- ignoring color %s" % (color,))

        self.urls[name] = url

    def setText(self, text):
        self.text = text
        for w in self.watchers:
            w.stepTextChanged(self.build, self, text)

    def setText2(self, text):
        self.text2 = text
        for w in self.watchers:
            w.stepText2Changed(self.build, self, text)

    def setStatistic(self, name, value):
        """Set the given statistic. Usually called by subclasses.
        """
        self.statistics[name] = value

    def __getstate__(self):
        d = styles.Versioned.__getstate__(self)
        del d['build']
        if d.has_key('progress'):
            del d['progress']
        del d['watchers']
        del d['finishedWatchers']
        del d['updates']
        return d

    def __setstate__(self, d):
        styles.Versioned.__setstate__(self, d)
        # point the logs back at their step; self.build must be filled in
        # by our parent
        for loog in self.logs:
            loog.step = self

    def upgradeToVersion1(self):
        if not hasattr(self, "urls"):
            self.urls = {}

    def upgradeToVersion2(self):
        if not hasattr(self, "statistics"):
            self.statistics = {}

    def asDict(self):
        result = {}

        result['name'] = self.getName()

        result['text'] = self.getText()
        result['results'] = self.getResults()
        result['isStarted'] = self.isStarted()
        result['isFinished'] = self.isFinished()
        result['statistics'] = self.statistics
        result['times'] = self.getTimes()
        result['expectations'] = self.getExpectations()
        result['eta'] = self.getETA()
        result['urls'] = self.getURLs()

        return result

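# Illustrative sketch (not part of the original module): how a test step might
# record per-step statistics, and how they can be combined across a build with
# the same reduce() pattern used by the summary helper below.  'step_status'
# and 'build_status' stand for BuildStepStatus/BuildStatus instances; the
# statistic names are hypothetical.
def _example_record_test_counts(step_status, passed, failed):
    step_status.setStatistic("tests-passed", passed)
    step_status.setStatistic("tests-failed", failed)

def _example_total_failures(build_status):
    counts = [s.getStatistic("tests-failed")
              for s in build_status.getSteps()
              if s.hasStatistic("tests-failed")]
    return reduce(lambda a, b: a + b, counts, 0)
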
    implements(interfaces.IBuildStatus, interfaces.IStatusEvent)
    persistenceVersion = 3

    source = None
    reason = None
    changes = []
    blamelist = []
    progress = None
    started = None
    finished = None
    currentStep = None
    text = []
    results = None
    slavename = "???"

    watchers = []
    updates = {}
    finishedWatchers = []
    testResults = {}

    def getBuilder(self):
        """
        @rtype: L{BuilderStatus}
        """
        return self.builder

    def getSteps(self):
        """Return a list of IBuildStepStatus objects. For invariant builds
        (those which always use the same set of Steps), this should be the
        complete list, however some of the steps may not have started yet
        (step.getTimes()[0] will be None). For variant builds, this may not
        be complete (asking again later may give you more of them)."""
        return self.steps

    _sentinel = []

        """Summarize the named statistic over all steps in which it
        exists, using summary_fn and initial_value to combine multiple
        results into a single result. This translates to a call to Python's
        X{reduce}::
            return reduce(summary_fn, step_stats_list, initial_value)
        """
        step_stats_list = [
                st.getStatistic(name)
                for st in self.steps
                if st.hasStatistic(name) ]
        if initial_value is self._sentinel:
            return reduce(summary_fn, step_stats_list)
        else:
            return reduce(summary_fn, step_stats_list, initial_value)

    def getText(self):
        text = []
        text.extend(self.text)
        for s in self.steps:
            text.extend(s.text2)
        return text

    def subscribe(self, receiver, updateInterval=None):
        self.watchers.append(receiver)
        if updateInterval is not None:
            self.sendETAUpdate(receiver, updateInterval)

        """The Build is setting up, and has added a new BuildStep to its
        list. Create a BuildStepStatus object to which it can send status
        updates."""

        s = BuildStepStatus(self)
        s.setName(name)
        self.steps.append(s)
        return s

        """The Build has been set up and is about to be started. It can now
        be safely queried, so it is time to announce the new build."""

        self.started = util.now()

        self.builder.buildStarted(self)

    def setText(self, text):
        assert isinstance(text, (list, tuple))
        self.text = text

        self.steps = []

    def generateLogfileName(self, stepname, logname):
        """Return a filename (relative to the Builder's base directory) where
        the logfile's contents can be stored uniquely.

        The base filename is made by combining our build number, the Step's
        name, and the log's name, then removing unsuitable characters. The
        filename is then made unique by appending _0, _1, etc, until it does
        not collide with any other logfile.

        These files are kept in the Builder's basedir (rather than a
        per-Build subdirectory) because that makes cleanup easier: cron and
        find will help get rid of the old logs, but the empty directories are
        more of a hassle to remove."""

        starting_filename = "%d-log-%s-%s" % (self.number, stepname, logname)
        starting_filename = re.sub(r'[^\w\.\-]', '_', starting_filename)

        unique_counter = 0
        filename = starting_filename
        while filename in [l.filename
                           for step in self.steps
                           for l in step.getLogs()
                           if l.filename]:
            filename = "%s_%d" % (starting_filename, unique_counter)
            unique_counter += 1
        return filename

    def __getstate__(self):
        d = styles.Versioned.__getstate__(self)
        # a pickled Build is always stored as finished
        if not self.finished:
            d['finished'] = True

        for k in 'builder', 'watchers', 'updates', 'finishedWatchers':
            if k in d: del d[k]
        return d

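# Illustrative sketch (not part of the original module): the naming scheme
# described in generateLogfileName() above, applied to hypothetical values.
# Build 12, step "compile", log "stdio / output" would become
# "12-log-compile-stdio___output" (unsafe characters are replaced by "_"; on a
# collision, "_0", "_1", ... would then be appended).
def _example_logfile_name(number, stepname, logname):
    name = "%d-log-%s-%s" % (number, stepname, logname)
    return re.sub(r'[^\w\.\-]', '_', name)
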

    """I handle status information for a single process.base.Builder object.
    That object sends status changes to me (frequently as Events), and I
    provide them on demand to the various status recipients, like the HTML
    waterfall display and the live status clients. It also sends build
    summaries to me, which I log and provide to status clients who aren't
    interested in seeing details of the individual build steps.

    I am responsible for maintaining the list of historic Events and Builds,
    pruning old ones, and loading them from / saving them to disk.

    I live in the buildbot.process.base.Builder object, in the
    .builder_status attribute.

    @type  category: string
    @ivar  category: user-defined category this builder belongs to; can be
                     used to filter on in status clients
    """

    implements(interfaces.IBuilderStatus, interfaces.IEventSource)
    persistenceVersion = 1

    buildCacheSize = 15
    eventHorizon = 50
    logHorizon = 40
    buildHorizon = 100

    category = None
    currentBigState = "offline"
    basedir = None

    def __init__(self, buildername, category=None):
        self.name = buildername
        self.category = category

        self.slavenames = []
        self.events = []
        self.lastBuildStatus = None
        self.currentBuilds = []
        self.nextBuild = None
        self.watchers = []
        self.buildCache = weakref.WeakValueDictionary()
        self.buildCache_LRU = []
        self.logCompressionLimit = False
        self.logCompressionMethod = "bz2"
        self.logMaxSize = None
        self.logMaxTailSize = None

    def __getstate__(self):
        d = styles.Versioned.__getstate__(self)
        d['watchers'] = []
        del d['buildCache']
        del d['buildCache_LRU']
        for b in self.currentBuilds:
            b.saveYourself()
        del d['currentBuilds']
        d.pop('pendingBuilds', None)
        del d['currentBigState']
        del d['basedir']
        del d['status']
        del d['nextBuildNumber']
        return d

    def __setstate__(self, d):
        # re-create anything that was dropped when we were pickled
        styles.Versioned.__setstate__(self, d)
        self.buildCache = weakref.WeakValueDictionary()
        self.buildCache_LRU = []
        self.currentBuilds = []
        self.watchers = []
        self.slavenames = []

    def upgradeToVersion1(self):
        if hasattr(self, 'slavename'):
            self.slavenames = [self.slavename]
            del self.slavename
        if hasattr(self, 'nextBuildNumber'):
            del self.nextBuildNumber

    def determineNextBuildNumber(self):
        """Scan our directory of saved BuildStatus instances to determine
        what our self.nextBuildNumber should be. Set it one larger than the
        highest-numbered build we discover. This is called by the top-level
        Status object shortly after we are created or loaded from disk.
        """
        existing_builds = [int(f)
                           for f in os.listdir(self.basedir)
                           if re.match(r"^\d+$", f)]
        if existing_builds:
            self.nextBuildNumber = max(existing_builds) + 1
        else:
            self.nextBuildNumber = 0

    def setLogCompressionLimit(self, lowerLimit):
        self.logCompressionLimit = lowerLimit

    def setLogCompressionMethod(self, method):
        assert method in ("bz2", "gz")
        self.logCompressionMethod = method

    def saveYourself(self):
        for b in self.currentBuilds:
            if not b.isFinished():
                # interrupted build: save it anyway
                b.saveYourself()
        filename = os.path.join(self.basedir, "builder")
        tmpfilename = filename + ".tmp"
        try:
            dump(self, open(tmpfilename, "wb"), -1)
            if sys.platform == 'win32':
                # windows cannot rename a file on top of an existing one
                if os.path.exists(filename):
                    os.unlink(filename)
            os.rename(tmpfilename, filename)
        except:
            log.msg("unable to save builder %s" % self.name)
            log.err()

    def prune(self):
        gc.collect()

        # begin by pruning our own events
        self.events = self.events[-self.eventHorizon:]

        # get the horizons straight
        if self.buildHorizon:
            earliest_build = self.nextBuildNumber - self.buildHorizon
        else:
            earliest_build = 0

        if self.logHorizon:
            earliest_log = self.nextBuildNumber - self.logHorizon
        else:
            earliest_log = 0

        if earliest_log < earliest_build:
            earliest_log = earliest_build

        if earliest_build == 0:
            return

        # skim the directory and delete anything that shouldn't be there
        # any more
        build_re = re.compile(r"^([0-9]+)$")
        build_log_re = re.compile(r"^([0-9]+)-.*$")

        if not os.path.exists(self.basedir):
            return

        for filename in os.listdir(self.basedir):
            num = None
            mo = build_re.match(filename)
            is_logfile = False
            if mo:
                num = int(mo.group(1))
            else:
                mo = build_log_re.match(filename)
                if mo:
                    num = int(mo.group(1))
                    is_logfile = True

            if num is None: continue
            if num in self.buildCache: continue

            if (is_logfile and num < earliest_log) or num < earliest_build:
                pathname = os.path.join(self.basedir, filename)
                log.msg("pruning '%s'" % pathname)
                try: os.unlink(pathname)
                except OSError: pass

    def getCurrentBuilds(self):
        return self.currentBuilds

    def getEvent(self, number):
        try:
            return self.events[number]
        except IndexError:
            return None

    def generateFinishedBuilds(self, branches=[],
                               num_builds=None,
                               max_buildnum=None,
                               finished_before=None,
                               max_search=200):
        got = 0
        for Nb in itertools.count(1):
            if Nb > self.nextBuildNumber:
                break
            if Nb > max_search:
                break
            build = self.getBuild(-Nb)
            if build is None:
                continue
            if max_buildnum is not None:
                if build.getNumber() > max_buildnum:
                    continue
            if not build.isFinished():
                continue
            if finished_before is not None:
                start, end = build.getTimes()
                if end >= finished_before:
                    continue
            if branches:
                if build.getSourceStamp().branch not in branches:
                    continue
            got += 1
            yield build
            if num_builds is not None:
                if got >= num_builds:
                    return

    def eventGenerator(self, branches=[], categories=[], committers=[], minTime=0):
        """This function creates a generator which will provide all of this
        Builder's status events, starting with the most recent and
        progressing backwards in time. """

        eventIndex = -1
        e = self.getEvent(eventIndex)
        for Nb in range(1, self.nextBuildNumber+1):
            b = self.getBuild(-Nb)
            if not b:
                # the most recent build might not be on disk yet; skip it and
                # keep looking, but stop at the first gap after that
                if Nb == 1:
                    continue
                break
            if b.getTimes()[0] < minTime:
                break
            if branches and not b.getSourceStamp().branch in branches:
                continue
            if categories and not b.getBuilder().getCategory() in categories:
                continue
            if committers and not [True for c in b.getChanges() if c.who in committers]:
                continue
            steps = b.getSteps()
            for Ns in range(1, len(steps)+1):
                if steps[-Ns].started:
                    step_start = steps[-Ns].getTimes()[0]
                    while e is not None and e.getTimes()[0] > step_start:
                        yield e
                        eventIndex -= 1
                        e = self.getEvent(eventIndex)
                    yield steps[-Ns]
            yield b
        while e is not None:
            yield e
            eventIndex -= 1
            e = self.getEvent(eventIndex)
            if e and e.getTimes()[0] < minTime:
                break

    def subscribe(self, receiver):
        self.watchers.append(receiver)
        self.publishState(receiver)
        # also forward the subscription to the master Status object
        self.status._builder_subscribe(self.name, receiver)

        self.slavenames = names

        """The Builder has decided to start a build, but the Build object is
        not yet ready to report status (it has not finished creating the
        Steps). Create a BuildStatus object that it can use."""
        number = self.nextBuildNumber
        self.nextBuildNumber += 1

        s = BuildStatus(self, number)
        s.waitUntilFinished().addCallback(self._buildFinished)
        return s

    def buildStarted(self, s):
        """Now the BuildStatus object is ready to go (it knows all of its
        Steps, its ETA, etc), so it is safe to notify our watchers."""

        assert s.builder is self
        assert s.number == self.nextBuildNumber - 1
        assert s not in self.currentBuilds
        self.currentBuilds.append(s)
        self.touchBuildCache(s)

        for w in self.watchers:
            try:
                receiver = w.buildStarted(self.getName(), s)
                if receiver:
                    if type(receiver) == type(()):
                        s.subscribe(receiver[0], receiver[1])
                    else:
                        s.subscribe(receiver)
                    d = s.waitUntilFinished()
                    d.addCallback(lambda s: s.unsubscribe(receiver))
            except:
                log.msg("Exception caught notifying %r of buildStarted event" % w)
                log.err()

        state = self.currentBigState
        if state == "offline":
            client.currentlyOffline()
        elif state == "idle":
            client.currentlyIdle()
        elif state == "building":
            client.currentlyBuilding()
        else:
            log.msg("Hey, self.currentBigState is weird:", state)

        first = self.events[0].number
        if first + len(self.events)-1 != self.events[-1].number:
            log.msg(self,
                    "lost an event somewhere: [0] is %d, [%d] is %d" % \
                    (self.events[0].number,
                     len(self.events) - 1,
                     self.events[-1].number))
            for e in self.events:
                log.msg("e[%d]: " % e.number, e)
            return None
        offset = num - first
        log.msg(self, "offset", offset)
        try:
            return self.events[offset]
        except IndexError:
            return None

        if hasattr(self, "allEvents"):
            return
        self.allEvents = self.loadFile("events", [])
        if self.allEvents:
            self.nextEventNumber = self.allEvents[-1].number + 1
        else:
            self.nextEventNumber = 0

        self.saveFile("events", self.allEvents)

    def asDict(self):
        result = {}

        result['basedir'] = os.path.basename(self.basedir)
        result['category'] = self.category
        result['slaves'] = self.slavenames

        current_builds = [b.getNumber() for b in self.currentBuilds]
        cached_builds = list(set(self.buildCache.keys() + current_builds))
        cached_builds.sort()
        result['cachedBuilds'] = cached_builds
        result['currentBuilds'] = current_builds
        result['state'] = self.getState()[0]

        result['pendingBuilds'] = [
            b.getSourceStamp().asDict() for b in self.getPendingBuilds()
        ]
        return result

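# Illustrative sketch (not part of the original module): the horizon
# arithmetic used by the pruning code above.  With nextBuildNumber=120,
# buildHorizon=100 and logHorizon=40, builds below #20 are deleted entirely
# and logfiles below build #80 are deleted.
def _example_horizons(nextBuildNumber, buildHorizon, logHorizon):
    if buildHorizon:
        earliest_build = nextBuildNumber - buildHorizon
    else:
        earliest_build = 0
    if logHorizon:
        earliest_log = nextBuildNumber - logHorizon
    else:
        earliest_log = 0
    if earliest_log < earliest_build:
        earliest_log = earliest_build
    return earliest_build, earliest_log
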
    implements(interfaces.ISlaveStatus)

    admin = None
    host = None
    access_uri = None
    version = None
    connected = False
    graceful_shutdown = False

    def __init__(self, name):
        self.name = name
        self._lastMessageReceived = 0
        self.runningBuilds = []
        self.graceful_callbacks = []

        return self._lastMessageReceived

        return self.runningBuilds

        self._lastMessageReceived = when

        self.runningBuilds.append(build)

        """Set the graceful shutdown flag, and notify all the watchers"""
        self.graceful_shutdown = graceful
        for cb in self.graceful_callbacks:
            eventually(cb, graceful)

        """Add watcher to the list of watchers to be notified when the
        graceful shutdown flag is changed."""
        if not watcher in self.graceful_callbacks:
            self.graceful_callbacks.append(watcher)

        """Remove watcher from the list of watchers to be notified when the
        graceful shutdown flag is changed."""
        if watcher in self.graceful_callbacks:
            self.graceful_callbacks.remove(watcher)

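# Illustrative sketch (not part of the original module): a graceful-shutdown
# watcher of the kind kept in graceful_callbacks above.  It is invoked through
# eventually(), i.e. from a later reactor turn, with the new flag value.
def _example_graceful_watcher(graceful):
    if graceful:
        log.msg("slave asked to shut down gracefully")
    else:
        log.msg("graceful shutdown request withdrawn")
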
2202 """
2203 I represent the status of the buildmaster.
2204 """
2205 implements(interfaces.IStatus)
2206
2207 - def __init__(self, botmaster, basedir):
2208 """
2209 @type botmaster: L{buildbot.master.BotMaster}
2210 @param botmaster: the Status object uses C{.botmaster} to get at
2211 both the L{buildbot.master.BuildMaster} (for
2212 various buildbot-wide parameters) and the
2213 actual Builders (to get at their L{BuilderStatus}
2214 objects). It is not allowed to change or influence
2215 anything through this reference.
2216 @type basedir: string
2217 @param basedir: this provides a base directory in which saved status
2218 information (changes.pck, saved Build status
2219 pickles) can be stored
2220 """
2221 self.botmaster = botmaster
2222 self.db = None
2223 self.basedir = basedir
2224 self.watchers = []
2225 assert os.path.isdir(basedir)
2226
2227 self.logCompressionLimit = 4*1024
2228 self.logCompressionMethod = "bz2"
2229
2230 self.logMaxSize = None
2231 self.logMaxTailSize = None
2232
2233 self._builder_observers = collections.KeyedSets()
2234 self._buildreq_observers = collections.KeyedSets()
2235 self._buildset_success_waiters = collections.KeyedSets()
2236 self._buildset_finished_waiters = collections.KeyedSets()
2237
2239 self.db = db
2240 self.db.subscribe_to("add-build", self._db_builds_changed)
2241 self.db.subscribe_to("add-buildset", self._db_buildset_added)
2242 self.db.subscribe_to("modify-buildset", self._db_buildsets_changed)
2243 self.db.subscribe_to("add-buildrequest", self._db_buildrequest_added)
2244 self.db.subscribe_to("cancel-buildrequest", self._db_buildrequest_cancelled)
2245
2246
2247
2254
2256 prefix = self.getBuildbotURL()
2257 if not prefix:
2258 return None
2259 if interfaces.IStatus.providedBy(thing):
2260 return prefix
2261 if interfaces.ISchedulerStatus.providedBy(thing):
2262 pass
2263 if interfaces.IBuilderStatus.providedBy(thing):
2264 builder = thing
2265 return prefix + "builders/%s" % (
2266 urllib.quote(builder.getName(), safe=''),
2267 )
2268 if interfaces.IBuildStatus.providedBy(thing):
2269 build = thing
2270 builder = build.getBuilder()
2271 return prefix + "builders/%s/builds/%d" % (
2272 urllib.quote(builder.getName(), safe=''),
2273 build.getNumber())
2274 if interfaces.IBuildStepStatus.providedBy(thing):
2275 step = thing
2276 build = step.getBuild()
2277 builder = build.getBuilder()
2278 return prefix + "builders/%s/builds/%d/steps/%s" % (
2279 urllib.quote(builder.getName(), safe=''),
2280 build.getNumber(),
2281 urllib.quote(step.getName(), safe=''))
2282
2283
2284
2285
2286
2287 if interfaces.IStatusEvent.providedBy(thing):
2288 from buildbot.changes import changes
2289
2290 if isinstance(thing, changes.Change):
2291 change = thing
2292 return "%schanges/%d" % (prefix, change.number)
2293
2294 if interfaces.IStatusLog.providedBy(thing):
2295 log = thing
2296 step = log.getStep()
2297 build = step.getBuild()
2298 builder = build.getBuilder()
2299
2300 logs = step.getLogs()
2301 for i in range(len(logs)):
2302 if log is logs[i]:
2303 break
2304 else:
2305 return None
2306 return prefix + "builders/%s/builds/%d/steps/%s/logs/%s" % (
2307 urllib.quote(builder.getName(), safe=''),
2308 build.getNumber(),
2309 urllib.quote(step.getName(), safe=''),
2310 urllib.quote(log.getName()))
2311
2314
2317
2320
2322 if categories == None:
2323 return self.botmaster.builderNames[:]
2324
2325 l = []
2326
2327 for name in self.botmaster.builderNames:
2328 builder = self.botmaster.builders[name]
2329 if builder.builder_status.category in categories:
2330 l.append(name)
2331 return l
2332
2334 """
2335 @rtype: L{BuilderStatus}
2336 """
2337 return self.botmaster.builders[name].builder_status
2338
2340 return self.botmaster.slaves.keys()
2341
2344
2348
    def generateFinishedBuilds(self, builders=[], branches=[],
                               num_builds=None, finished_before=None,
                               max_search=200):

        def want_builder(bn):
            if builders:
                return bn in builders
            return True
        builder_names = [bn
                         for bn in self.getBuilderNames()
                         if want_builder(bn)]

        # 'sources' is a list of generators, one per Builder we care about
        sources = []
        for bn in builder_names:
            b = self.getBuilder(bn)
            g = b.generateFinishedBuilds(branches,
                                         finished_before=finished_before,
                                         max_search=max_search)
            sources.append(g)

        # next_build holds the next build from each source generator
        next_build = [None] * len(sources)

        def refill():
            for i, g in enumerate(sources):
                if next_build[i]:
                    # already filled
                    continue
                if not g:
                    # this source is exhausted
                    continue
                try:
                    next_build[i] = g.next()
                except StopIteration:
                    next_build[i] = None
                    sources[i] = None

        got = 0
        while True:
            refill()
            # pick the candidate with the latest finish time
            candidates = [(i, b, b.getTimes()[1])
                          for i, b in enumerate(next_build)
                          if b is not None]
            candidates.sort(lambda x, y: cmp(x[2], y[2]))
            if not candidates:
                return

            i, build, finished_time = candidates[-1]
            next_build[i] = None
            got += 1
            yield build
            if num_builds is not None:
                if got >= num_builds:
                    return

    def builderAdded(self, name, basedir, category=None):
        """
        @rtype: L{BuilderStatus}
        """
        filename = os.path.join(self.basedir, basedir, "builder")
        log.msg("trying to load status pickle from %s" % filename)
        builder_status = None
        try:
            builder_status = load(open(filename, "rb"))
            styles.doUpgrade()
        except IOError:
            log.msg("no saved status pickle, creating a new one")
        except:
            log.msg("error while loading status pickle, creating a new one")
            log.msg("error follows:")
            log.err()
        if not builder_status:
            builder_status = BuilderStatus(name, category)
            builder_status.addPointEvent(["builder", "created"])
        log.msg("added builder %s in category %s" % (name, category))

        # an unpickled object might not have category set from before,
        # so set it here to make sure
        builder_status.category = category
        builder_status.basedir = os.path.join(self.basedir, basedir)
        builder_status.name = name
        builder_status.status = self

        if not os.path.isdir(builder_status.basedir):
            os.makedirs(builder_status.basedir)
        builder_status.determineNextBuildNumber()

        builder_status.setBigState("offline")
        builder_status.setLogCompressionLimit(self.logCompressionLimit)
        builder_status.setLogCompressionMethod(self.logCompressionMethod)
        builder_status.setLogMaxSize(self.logMaxSize)
        builder_status.setLogMaxTailSize(self.logMaxTailSize)

        for t in self.watchers:
            self.announceNewBuilder(t, name, builder_status)

        return builder_status

        for r in requests:
            pass

    def _db_builds_changed(self, category, bid):
        brid, buildername, buildnum = self.db.get_build_info(bid)
        if brid in self._buildreq_observers:
            bs = self.getBuilder(buildername).getBuild(buildnum)
            if bs:
                for o in self._buildreq_observers[brid]:
                    eventually(o, bs)

        self._buildreq_observers.add(brid, observer)

        self._buildreq_observers.discard(brid, observer)

    def _buildset_waitUntilSuccess(self, bsid):
        d = defer.Deferred()
        self._buildset_success_waiters.add(bsid, d)
        # pick up any buildset that is already successful
        self._db_buildsets_changed("modify-buildset", bsid)
        return d

    def _buildset_waitUntilFinished(self, bsid):
        d = defer.Deferred()
        self._buildset_finished_waiters.add(bsid, d)
        self._db_buildsets_changed("modify-buildset", bsid)
        return d

    def _db_buildsets_changed(self, category, *bsids):
        for bsid in bsids:
            self._db_buildset_changed(bsid)

    def _db_buildset_changed(self, bsid):
        # check bsid to see if it's successful or finished, and notify anyone
        # who cares
        if (bsid not in self._buildset_success_waiters
            and bsid not in self._buildset_finished_waiters):
            return
        successful, finished = self.db.examine_buildset(bsid)
        bss = BuildSetStatus(bsid, self, self.db)
        if successful is not None:
            for d in self._buildset_success_waiters.pop(bsid):
                eventually(d.callback, bss)
        if finished:
            for d in self._buildset_finished_waiters.pop(bsid):
                eventually(d.callback, bss)

    def _builder_subscribe(self, buildername, watcher):
        self._builder_observers.add(buildername, watcher)

    def _builder_unsubscribe(self, buildername, watcher):
        self._builder_observers.discard(buildername, watcher)

    def _db_buildrequest_added(self, category, *brids):
        self._handle_buildrequest_event("added", brids)

    def _db_buildrequest_cancelled(self, category, *brids):
        self._handle_buildrequest_event("cancelled", brids)
