1 # ex:ts=4:sw=4:sts=4:et
2 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4 BitBake 'Fetch' implementations
6 Classes for obtaining upstream sources for the
10 # Copyright (C) 2003, 2004 Chris Larson
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License version 2 as
14 # published by the Free Software Foundation.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License along
22 # with this program; if not, write to the Free Software Foundation, Inc.,
23 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
30 from bb import persist_data
class FetchError(Exception):
    """Raised when an attempt to download a remote source fails."""
class NoMethodError(Exception):
    """Raised when no fetcher implementation can handle a url (or set of urls)."""
class MissingParameterError(Exception):
    """Raised when a fetch method's url lacks a parameter the method requires."""
class ParameterError(Exception):
    """Raised when a url cannot be processed because its parameters are invalid."""
class MD5SumError(Exception):
    """Raised when a file's MD5 checksum does not match the expected value."""
class InvalidSRCREV(Exception):
    """Raised when an invalid SRCREV value is encountered."""
def uri_replace(uri, uri_find, uri_replace, d):
    """
    Rewrite `uri` using a find/replace pair of decoded uri patterns.
    NOTE(review): the inner line numbering jumps in this copy, so some
    branches of the original function are missing; the reconstructed
    indentation below is a best guess -- confirm against upstream.
    """
    # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
    # Decode each uri into [type, host, path, user, pswd, parm-dict].
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ['','','','','',{}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        # Only string components are treated as regex patterns; the
        # final parm dict is not a string and is not matched here.
        if type(i) == types.StringType:
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    # Component 2 is the path: keep the replaced dirname
                    # but use the basename of the local download file.
                    localfn = bb.fetch.localpath(uri, d)
                    result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
    # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
    # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
    # FIXME: apply replacements against options
    return bb.encodeurl(result_decoded)
# NOTE(review): the `def fetcher_init(d):` header and its docstring
# quotes are missing from this copy (inner numbering jumps); the lines
# below are that initializer's body.
    Called to initilize the fetchers once the configuration data is known
    Calls before this must not hit the cache.
    pd = persist_data.PersistData(d)
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        pd.delDomain("BB_URI_HEADREVS")
    # NOTE(review): an `else:` line appears to be missing above this
    # fatal call (inner numbering jumps 94 -> 96).
    bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")
# Function call order is usually:
# localpath can be called at any time
def init(urls, d, setup = True):
    # Build and cache one FetchData object per url, keyed on the recipe
    # file (FILE).  NOTE(review): several body lines are missing from
    # this copy (inner numbering jumps), including the branch creating
    # a fresh urldata dict, the loops over `urls`, and the final return.
    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]
    if url not in urldata:
        urldata[url] = FetchData(url, d)
    # Defer localpath setup until requested (setup flag on FetchData).
    if not urldata[url].setup:
        urldata[url].setup_localpath(d)
    urldata_cache[fn] = urldata
# NOTE(review): the `def go(...)` header for the fetch driver, its loop
# over urls, and the try/except wrappers implied by the "Errors aren't
# fatal here" comments are missing from this copy.
    init must have previously been called
    urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)
    if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
        # File already present along with md5 stamp file
        # Touch md5 file to show activity
        os.utime(ud.md5, None)
        # Errors aren't fatal here
    # Take the per-download lock so parallel tasks don't fetch the same
    # file twice.
    lf = bb.utils.lockfile(ud.lockfile)
    if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
        # If someone else fetched this before we got the lock,
        # notice and don't try again
        os.utime(ud.md5, None)
        # Errors aren't fatal here
        bb.utils.unlockfile(lf)
    # NOTE(review): the actual fetch call appears to be missing here
    # (inner numbering jumps 155 -> 159).
    if not m.forcefetch(u, ud, d):
        Fetch.write_md5sum(u, ud, d)
    bb.utils.unlockfile(lf)
# NOTE(review): the `def checkstatus(d):` header and the loop over
# urldata entries are missing from this copy; these are body fragments.
    Check all urls exist upstream
    init must have previously been called
    urldata = init([], d, True)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
    ret = m.checkstatus(u, ud, d)
    # NOTE(review): the `if not ret:` guard before this fatal call
    # appears to be missing (inner numbering jumps 175 -> 177).
    bb.msg.fatal(bb.msg.domain.Fetcher, "URL %s doesn't work" % u)
# NOTE(review): the `def localpaths(d):` header, the loop appending to
# `local`, and the final return are missing from this copy.
    Return a list of the local filenames, assuming successful fetch
    urldata = init([], d, True)
    local.append(ud.localpath)
# Module-level re-entrancy flag: setup_localpath() sets this while a
# fetcher evaluates its localpath so a nested SRCREV lookup can detect
# the recursion and return the magic "SRCREVINACTION" marker instead.
srcrev_internal_call = False
# NOTE(review): the `def get_srcrev(d):` header and docstring quotes
# are missing from this copy; the lines below are its (partial) body.
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    # Neater solutions welcome!
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    # Only call setup_localpath on URIs which suppports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    # NOTE(review): the loop collecting SCM urls into `scms` is missing
    # here.  `suppports_srcrev` (sic) matches the misspelled method name
    # on the Fetch base class below -- do not "fix" it in isolation.
    if ud.method.suppports_srcrev():
        ud.setup_localpath(d)
    bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
    bb.data.setVar('__BB_DONT_CACHE','1', d)
    # Single-SCM case: delegate straight to that fetcher.
    return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
    # Substitute each named SCM's revision into the format template.
    if 'name' in urldata[scm].parm:
        name = urldata[scm].parm["name"]
    rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
    format = format.replace(name, rev)
def localpath(url, d, cache = True):
    # NOTE(review): docstring quotes and the line assigning `ud` (the
    # init(...) call) are missing from this copy (inner numbering jumps
    # 251 -> 253 and 254 -> 258).
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classed in OE e.g. patch.bbclass
    return ud[url].localpath
def runfetchcmd(cmd, d, quiet = False):
    # NOTE(review): docstring quotes and several body lines are missing
    # from this copy (inner numbering jumps around 266, 273, 276-280,
    # 283-299), including the output-accumulation loop and the
    # conditions guarding the two raise statements.
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['PATH', 'GIT_PROXY_HOST', 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
    for var in exportvars:
        val = data.getVar(var, d, True)
        # NOTE(review): an `if val:` guard appears to be missing here.
        cmd = 'export ' + var + '=%s; %s' % (val, cmd)
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    line = stdout_handle.readline()
    # popen.close() returns the wait() status, or None on success.
    status = stdout_handle.close() or 0
    # Low byte of the status is the terminating signal (if any).
    exitstatus = status & 0xff
    raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))
class FetchData(object):
    A class which represents the fetcher state for a given URI.
    def __init__(self, url, d):
        # Split the (variable-expanded) url into its components.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        # user=/pswd= url parameters only apply when the url itself
        # carried no credentials.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        # NOTE(review): the loop over registered fetch methods and the
        # body of this `if` (recording the matching method) are missing
        # from this copy (inner numbering jumps 316 -> 319 -> 322).
        if m.supports(url, self, d):
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        # NOTE(review): inner numbering jumps here (325, 329, 334) --
        # lines around this branch, including the `else:` for the
        # non-user-supplied path, appear to be missing.
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
        # Flag re-entrant SRCREV evaluation while the fetcher computes
        # its localpath (see module-level srcrev_internal_call).
        bb.fetch.srcrev_internal_call = True
        self.localpath = self.method.localpath(self.url, self, d)
        bb.fetch.srcrev_internal_call = False
        # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
        bb.data.delVar("ISHOULDNEVEREXIST", d)
        # Companion stamp files derived from the local download path.
        self.md5 = self.localpath + '.md5'
        self.lockfile = self.localpath + '.lock'
# NOTE(review): the `class Fetch(object):` header is missing from this
# copy; the docstring and the methods that follow belong to that class.
    """Base class for 'fetch'ing data"""

    # NOTE(review): mutable default argument (`urls = []`) is shared
    # across calls; the method body is missing here -- confirm what it
    # does with `urls` before changing the default to None.
    def __init__(self, urls = []):
    def supports(self, url, urldata, d):
        # Predicate: can this fetcher handle `url`?  Subclasses
        # override.  NOTE(review): docstring quotes and the default
        # return line are missing from this copy.
        Check to see if this fetch class supports a given url.
    def localpath(self, url, urldata, d):
        # NOTE(review): docstring quotes and the default return line
        # are missing from this copy.
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
    # NOTE(review): the getUrls accessor and the setUrls body are
    # missing from this copy; the property below references both.
    def setUrls(self, urls):
    urls = property(getUrls, setUrls, None, "Urls property")
    def forcefetch(self, url, urldata, d):
        # Should the fetch happen even if the local file exists?
        # NOTE(review): docstring quotes and the default return line
        # are missing from this copy.
        Force a fetch, even if localpath exists?
    # NOTE(review): "suppports" is misspelled, but the caller in
    # get_srcrev uses the same spelling -- renaming it here alone would
    # break callers.
    def suppports_srcrev(self):
        The fetcher supports auto source revisions (SRCREV)
    def go(self, url, urldata, d):
        # Perform the actual fetch; the base class has no
        # implementation and always raises.
        Assumes localpath was called first
        raise NoMethodError("Missing implementation for url")
    def checkstatus(self, url, urldata, d):
        Check the status of a URL
        Assumes localpath was called first
        # Base implementation: just log that no check is possible.
        bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
    def getSRCDate(urldata, d):
        Return the SRC Date for the component
        # An explicit srcdate url parameter wins over any variables.
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']
        pn = data.getVar("PN", d, 1)
        # Per-recipe SRCDATE_<pn>/CVSDATE_<pn> take precedence over the
        # global SRCDATE/CVSDATE/DATE fallbacks.
        # NOTE(review): the `if pn:`/`else` lines separating the two
        # returns appear to be missing from this copy (inner numbering
        # jumps 404 -> 407 -> 409).
        return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)
    def srcrev_internal_helper(ud, d):
        # Resolve the revision for a url; per the (partial) docstring:
        a) a source revision if specified
        b) True if auto srcrev is in action
        # NOTE(review): the `if 'rev' in ud.parm:`/`if 'tag' in
        # ud.parm:` guards for the two returns below are missing from
        # this copy -- only the return lines survive.
        return ud.parm['rev']
        return ud.parm['tag']
        # Look up a name-qualified SRCREV first, then the plain one.
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        rev = data.getVar("SRCREV", d, 1)
        raise InvalidSRCREV("Please set SRCREV to a valid value")
        # NOTE(review): BUG -- `is` tests object identity, not string
        # equality; this should be `rev == "SRCREVINACTION"` (relying
        # on string interning is fragile).
        if rev is "SRCREVINACTION":
    srcrev_internal_helper = staticmethod(srcrev_internal_helper)
    def try_mirror(d, tarfn):
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        d Is a bb.data instance
        tarfn is the name of the tarball
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            # NOTE(review): the return for this early-out is missing
            # from this copy.
        pn = data.getVar('PN', d, True)
        src_tarball_stash = None
        # NOTE(review): a guard line before this lookup appears to be
        # missing (inner numbering jumps 458 -> 460).
        src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()
        for stash in src_tarball_stash:
            # NOTE(review): the url construction, the data copy (`ld`),
            # and the `try:` lines pairing with the two `except`
            # clauses below are missing from this copy.
            ud = FetchData(url, ld)
        except bb.fetch.NoMethodError:
            bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % url)
            ud.setup_localpath(ld)
            ud.method.go(url, ud, ld)
        except (bb.fetch.MissingParameterError,
                bb.fetch.MD5SumError):
            # Best-effort mirror fetch: log the failure and move on to
            # the next stash location.
            (type, value, traceback) = sys.exc_info()
            bb.msg.debug(2, bb.msg.domain.Fetcher, "Tarball stash fetch failure: %s" % value)
    try_mirror = staticmethod(try_mirror)
    def verify_md5sum(ud, got_sum):
        Verify the md5sum we wanted with the one we got
        # An md5sum= url parameter supplies the expected checksum.
        if 'md5sum' in ud.parm:
            wanted_sum = ud.parm['md5sum']
        # NOTE(review): the fallback for urls without an md5sum
        # parameter appears to be missing from this copy (inner
        # numbering jumps 491 -> 495).
        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)
498 def write_md5sum(url, ud, d):
499 md5data = bb.utils.md5_file(ud.localpath)
501 if not Fetch.verify_md5sum(ud, md5data):
502 raise MD5SumError(url)
504 md5out = file(ud.md5, 'w')
505 md5out.write(md5data)
507 write_md5sum = staticmethod(write_md5sum)
    def latest_revision(self, url, ud, d):
        Look in the cache for the latest revision, if not present ask the SCM.
        # NOTE(review): the body of this guard (handling fetchers that
        # lack a _latest_revision implementation) is missing from this
        # copy -- TODO confirm against upstream.
        if not hasattr(self, "_latest_revision"):
        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        # NOTE(review): the cache-hit early return appears to be
        # missing (inner numbering jumps 518 -> 522); on a miss the SCM
        # is asked and the answer cached.
        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
    def sortable_revision(self, url, ud, d):
        # Prefer a fetcher-specific sortable revision when the subclass
        # provides one (and, if it also defines _want_sortable_revision,
        # only when that predicate agrees).
        has_want_sortable = hasattr(self, "_want_sortable_revision")
        has_sortable = hasattr(self, "_sortable_revision")

        if not has_want_sortable and has_sortable:
            return self._sortable_revision(url, ud, d)
        elif has_want_sortable and self._want_sortable_revision(url, ud, d) and has_sortable:
            return self._sortable_revision(url, ud, d)

        # Fall back to a locally counted "<count>+<rev>" string kept in
        # the persistent BB_URI_LOCALCOUNT domain.
        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)
        latest_rev = self._build_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        # Unchanged revision: reuse the stored count.
        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)
        # NOTE(review): lines handling a missing/initial count appear
        # to be absent from this copy (inner numbering jumps 547 ->
        # 552); as shown, this would fail if count is None.
        count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return str(count + "+" + latest_rev)
559 def generate_revision_key(self, url, ud, d):
560 key = self._revision_key(url, ud, d)
561 return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
# Register the concrete fetcher implementations.  NOTE(review): the
# imports of these fetcher modules and the initialisation of the
# `methods` list are not visible in this copy of the file.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())