- """
- BitBake 'Fetch' implementations
- Classes for obtaining upstream sources for the
- BitBake build tools.
- """
- # Copyright (C) 2003, 2004 Chris Larson
- # Copyright (C) 2012 Intel Corporation
- #
- # SPDX-License-Identifier: GPL-2.0-only
- #
- # Based on functions from the base bb module, Copyright 2003 Holger Schurig
- import os, re
- import signal
- import logging
- import urllib.request, urllib.parse, urllib.error
- if 'git' not in urllib.parse.uses_netloc:
- urllib.parse.uses_netloc.append('git')
- import operator
- import collections
- import subprocess
- import pickle
- import errno
- import bb.utils
- import bb.checksum
- import bb.process
- import bb.event
- __version__ = "2"
- _checksum_cache = bb.checksum.FileChecksumCache()
- _revisions_cache = bb.checksum.RevisionsCache()
- logger = logging.getLogger("BitBake.Fetcher")
- CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ]
- SHOWN_CHECKSUM_LIST = ["sha256"]
- class BBFetchException(Exception):
- """Class all fetch exceptions inherit from"""
- def __init__(self, message):
- self.msg = message
- Exception.__init__(self, message)
- def __str__(self):
- return self.msg
- class UntrustedUrl(BBFetchException):
- """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
- def __init__(self, url, message=''):
- if message:
- msg = message
- else:
- msg = "The URL: '%s' is not trusted and cannot be used" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (url,)
- class MalformedUrl(BBFetchException):
- """Exception raised when encountering an invalid url"""
- def __init__(self, url, message=''):
- if message:
- msg = message
- else:
- msg = "The URL: '%s' is invalid and cannot be interpreted" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (url,)
- class FetchError(BBFetchException):
- """General fetcher exception when something happens incorrectly"""
- def __init__(self, message, url = None):
- if url:
- msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
- else:
- msg = "Fetcher failure: %s" % message
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
- class ChecksumError(FetchError):
- """Exception when mismatched checksum encountered"""
- def __init__(self, message, url = None, checksum = None):
- self.checksum = checksum
- FetchError.__init__(self, message, url)
- class NoChecksumError(FetchError):
- """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
- class UnpackError(BBFetchException):
- """General fetcher exception when something happens incorrectly when unpacking"""
- def __init__(self, message, url):
- msg = "Unpack failure for URL: '%s'. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
- class NoMethodError(BBFetchException):
- """Exception raised when there is no method to obtain a supplied url or set of urls"""
- def __init__(self, url):
- msg = "Could not find a fetcher which supports the URL: '%s'" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (url,)
- class MissingParameterError(BBFetchException):
- """Exception raised when a fetch method is missing a critical parameter in the url"""
- def __init__(self, missing, url):
- msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
- self.url = url
- self.missing = missing
- BBFetchException.__init__(self, msg)
- self.args = (missing, url)
- class ParameterError(BBFetchException):
- """Exception raised when a url cannot be processed due to invalid parameters."""
- def __init__(self, message, url):
- msg = "URL: '%s' has invalid parameters. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
- class NetworkAccess(BBFetchException):
- """Exception raised when network access is disabled but it is required."""
- def __init__(self, url, cmd):
- msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
- self.url = url
- self.cmd = cmd
- BBFetchException.__init__(self, msg)
- self.args = (url, cmd)
- class NonLocalMethod(Exception):
- def __init__(self):
- Exception.__init__(self)
- class MissingChecksumEvent(bb.event.Event):
- def __init__(self, url, **checksums):
- self.url = url
- self.checksums = checksums
- bb.event.Event.__init__(self)
- class URI(object):
- """
- A class representing a generic URI, with methods for
- accessing the URI components, and stringifies to the
- URI.
- It is constructed by calling it with a URI, or setting
- the attributes manually:
- uri = URI("http://example.com/")
- uri = URI()
- uri.scheme = 'http'
- uri.hostname = 'example.com'
- uri.path = '/'
- It has the following attributes:
- * scheme (read/write)
- * userinfo (authentication information) (read/write)
- * username (read/write)
- * password (read/write)
- Note, password is deprecated as of RFC 3986.
- * hostname (read/write)
- * port (read/write)
- * hostport (read only)
- "hostname:port", if both are set, otherwise just "hostname"
- * path (read/write)
- * path_quoted (read/write)
- A URI quoted version of path
- * params (dict) (read/write)
- * query (dict) (read/write)
- * relative (bool) (read only)
- True if this is a "relative URI" (e.g. file:foo.diff)
- It stringifies to the URI itself.
- Some notes about relative URIs: while it's specified that
- a URI beginning with <scheme>:// should either be directly
- followed by a hostname or a /, the old URI handling of the
- fetch2 library did not conform to this. Therefore, this URI
- class has some kludges to make sure that URIs are parsed in
- a way conforming to bitbake's current usage. This URI class
- supports the following:
- file:relative/path.diff (IETF compliant)
- git:relative/path.git (IETF compliant)
- git:///absolute/path.git (IETF compliant)
- file:///absolute/path.diff (IETF compliant)
- file://relative/path.diff (not IETF compliant)
- But it does not support the following:
- file://hostname/absolute/path.diff (would be IETF compliant)
- Note that the last case only applies to the list of
- explicitly allowed schemes (currently only file://), which are required
- to have URIs without a network location.
- """
- _relative_schemes = ['file', 'git']
- _netloc_forbidden = ['file']
- def __init__(self, uri=None):
- self.scheme = ''
- self.userinfo = ''
- self.hostname = ''
- self.port = None
- self._path = ''
- self.params = {}
- self.query = {}
- self.relative = False
- if not uri:
- return
- # We hijack the URL parameters, since the way bitbake uses
- # them is not quite RFC compliant.
- uri, param_str = (uri.split(";", 1) + [None])[:2]
- urlp = urllib.parse.urlparse(uri)
- self.scheme = urlp.scheme
- reparse = 0
- # Coerce urlparse to make URI scheme use netloc
- if not self.scheme in urllib.parse.uses_netloc:
- urllib.parse.uses_params.append(self.scheme)
- reparse = 1
- # Make urlparse happy(/ier) by converting local resources
- # to RFC compliant URL format. E.g.:
- # file://foo.diff -> file:foo.diff
- if urlp.scheme in self._netloc_forbidden:
- uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
- reparse = 1
- if reparse:
- urlp = urllib.parse.urlparse(uri)
- # Identify if the URI is relative or not
- if urlp.scheme in self._relative_schemes and \
- re.compile(r"^\w+:(?!//)").match(uri):
- self.relative = True
- if not self.relative:
- self.hostname = urlp.hostname or ''
- self.port = urlp.port
- self.userinfo += urlp.username or ''
- if urlp.password:
- self.userinfo += ':%s' % urlp.password
- self.path = urllib.parse.unquote(urlp.path)
- if param_str:
- self.params = self._param_str_split(param_str, ";")
- if urlp.query:
- self.query = self._param_str_split(urlp.query, "&")
- def __str__(self):
- userinfo = self.userinfo
- if userinfo:
- userinfo += '@'
- return "%s:%s%s%s%s%s%s" % (
- self.scheme,
- '' if self.relative else '//',
- userinfo,
- self.hostport,
- self.path_quoted,
- self._query_str(),
- self._param_str())
- def _param_str(self):
- return (
- ''.join([';', self._param_str_join(self.params, ";")])
- if self.params else '')
- def _query_str(self):
- return (
- ''.join(['?', self._param_str_join(self.query, "&")])
- if self.query else '')
- def _param_str_split(self, string, elmdelim, kvdelim="="):
- ret = collections.OrderedDict()
- for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
- ret[k] = v
- return ret
- def _param_str_join(self, dict_, elmdelim, kvdelim="="):
- return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
- @property
- def hostport(self):
- if not self.port:
- return self.hostname
- return "%s:%d" % (self.hostname, self.port)
- @property
- def path_quoted(self):
- return urllib.parse.quote(self.path)
- @path_quoted.setter
- def path_quoted(self, path):
- self.path = urllib.parse.unquote(path)
- @property
- def path(self):
- return self._path
- @path.setter
- def path(self, path):
- self._path = path
- if not path or re.compile("^/").match(path):
- self.relative = False
- else:
- self.relative = True
- @property
- def username(self):
- if self.userinfo:
- return (self.userinfo.split(":", 1))[0]
- return ''
- @username.setter
- def username(self, username):
- password = self.password
- self.userinfo = username
- if password:
- self.userinfo += ":%s" % password
- @property
- def password(self):
- if self.userinfo and ":" in self.userinfo:
- return (self.userinfo.split(":", 1))[1]
- return ''
- @password.setter
- def password(self, password):
- self.userinfo = "%s:%s" % (self.username, password)
- def decodeurl(url):
- """Decodes an URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
- uri = URI(url)
- path = uri.path if uri.path else "/"
- return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params
- def decodemirrorurl(url):
- """Decodes a mirror URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
- m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
- if not m:
- raise MalformedUrl(url)
- type = m.group('type')
- location = m.group('location')
- if not location:
- raise MalformedUrl(url)
- user = m.group('user')
- parm = m.group('parm')
- locidx = location.find('/')
- if locidx != -1 and type.lower() != 'file':
- host = location[:locidx]
- path = location[locidx:]
- elif type.lower() == 'file':
- host = ""
- path = location
- if user:
- path = user + '@' + path
- user = ""
- else:
- host = location
- path = "/"
- if user:
- m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
- if m:
- user = m.group('user')
- pswd = m.group('pswd')
- else:
- user = ''
- pswd = ''
- p = collections.OrderedDict()
- if parm:
- for s in parm.split(';'):
- if s:
- if not '=' in s:
- raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
- s1, s2 = s.split('=', 1)
- p[s1] = s2
- return type, host, urllib.parse.unquote(path), user, pswd, p
- def encodeurl(decoded):
- """Encodes a URL from tokens (scheme, network location, path,
- user, password, parameters).
- """
- type, host, path, user, pswd, p = decoded
- if not type:
- raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
- uri = URI()
- uri.scheme = type
- if user and type != "file":
- uri.username = user
- if pswd:
- uri.password = pswd
- if host and type != "file":
- uri.hostname = host
- if path:
- # Standardise path to ensure comparisons work
- while '//' in path:
- path = path.replace("//", "/")
- uri.path = path
- if type == "file":
- # Use the old, non-IETF-compliant style
- uri.relative = False
- if p:
- uri.params = p
- return str(uri)
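- # A sketch of how decodeurl()/encodeurl() round-trip a SRC_URI entry
- # (hostname and parameters are illustrative):
- #   decodeurl("git://git.example.com/repo.git;protocol=https;branch=main")
- #   -> ('git', 'git.example.com', '/repo.git', '', '',
- #       {'protocol': 'https', 'branch': 'main'})
- #   encodeurl(('git', 'git.example.com', '/repo.git', '', '',
- #              {'protocol': 'https', 'branch': 'main'}))
- #   -> 'git://git.example.com/repo.git;protocol=https;branch=main'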
- def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
- if not ud.url or not uri_find or not uri_replace:
- logger.error("uri_replace: passed an undefined value, not replacing")
- return None
- uri_decoded = list(decodemirrorurl(ud.url))
- uri_find_decoded = list(decodemirrorurl(uri_find))
- uri_replace_decoded = list(decodemirrorurl(uri_replace))
- logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
- result_decoded = ['', '', '', '', '', {}]
- # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
- for loc, i in enumerate(uri_find_decoded):
- result_decoded[loc] = uri_decoded[loc]
- regexp = i
- if loc == 0 and regexp and not regexp.endswith("$"):
- # Leaving the type unanchored would let a regexp such as "file" also match
- # "files", which is clearly undesirable.
- regexp += "$"
- if loc == 5:
- # Handle URL parameters
- if i:
- # Any specified URL parameters must match
- for k in uri_find_decoded[loc]:
- if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
- return None
- # Overwrite any specified replacement parameters
- for k in uri_replace_decoded[loc]:
- for l in replacements:
- uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
- result_decoded[loc][k] = uri_replace_decoded[loc][k]
- elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
- # User/password in the replacement is just a straight replacement
- result_decoded[loc] = uri_replace_decoded[loc]
- elif (re.match(regexp, uri_decoded[loc])):
- if not uri_replace_decoded[loc]:
- result_decoded[loc] = ""
- else:
- for k in replacements:
- uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
- #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
- result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1)
- if loc == 2:
- # Handle path manipulations
- basename = None
- if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
- # If the source and destination url types differ, must be a mirrortarball mapping
- basename = os.path.basename(mirrortarball)
- # Kill parameters, they make no sense for mirror tarballs
- uri_decoded[5] = {}
- uri_find_decoded[5] = {}
- elif ud.localpath and ud.method.supports_checksum(ud):
- basename = os.path.basename(ud.localpath)
- if basename:
- uri_basename = os.path.basename(uri_decoded[loc])
- # Prefix with a slash as a sentinel in case
- # result_decoded[loc] does not contain one.
- path = "/" + result_decoded[loc]
- if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
- result_decoded[loc] = path[1:-len(uri_basename)] + basename
- elif not path.endswith("/" + basename):
- result_decoded[loc] = os.path.join(path[1:], basename)
- else:
- return None
- result = encodeurl(result_decoded)
- if result == ud.url:
- return None
- logger.debug2("For url %s returning %s" % (ud.url, result))
- return result
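- # uri_replace() is what implements PREMIRRORS/MIRRORS mappings. A typical
- # (illustrative) pair passed in as (uri_find, uri_replace) is:
- #   "git://.*/.*"  ->  "http://downloads.example.org/mirror/sources/"
- # Tokens such as TYPE, HOST, PATH, BASENAME and MIRRORNAME appearing in the
- # replacement string are substituted from the `replacements` dict built in
- # build_mirroruris() below.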
- methods = []
- urldata_cache = {}
- saved_headrevs = {}
- def fetcher_init(d, servercontext=True):
- """
- Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache.
- """
- _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
- _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
- if not servercontext:
- return
- try:
- # fetcher_init is called multiple times, so make sure we only save the
- # revs the first time it is called.
- if not bb.fetch2.saved_headrevs:
- bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
- except:
- pass
- # When to drop SCM head revisions is controlled by user policy
- srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
- if srcrev_policy == "cache":
- logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- _revisions_cache.clear_cache()
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
- def fetcher_parse_save():
- _checksum_cache.save_extras()
- _revisions_cache.save_extras()
- def fetcher_parse_done():
- _checksum_cache.save_merge()
- _revisions_cache.save_merge()
- def fetcher_compare_revisions(d):
- """
- Compare the revisions in the persistent cache with the saved values from
- when bitbake was started and return true if they have changed.
- """
- headrevs = _revisions_cache.get_revs()
- return headrevs != bb.fetch2.saved_headrevs
- def mirror_from_string(data):
- mirrors = (data or "").replace('\\n',' ').split()
- # Split into pairs
- if len(mirrors) % 2 != 0:
- bb.warn('Invalid mirror data %s, should have paired members.' % data)
- return list(zip(*[iter(mirrors)]*2))
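- # For example (illustrative hosts):
- #   mirror_from_string("git://.*/.* http://mirror.example.org/sources/ "
- #                      "ftp://.*/.* http://mirror.example.org/sources/")
- #   -> [('git://.*/.*', 'http://mirror.example.org/sources/'),
- #       ('ftp://.*/.*', 'http://mirror.example.org/sources/')]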
- def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
- """
- Verify the checksums (e.g. SHA256) for the downloaded source file.
- Raises a ChecksumError if any checksum specified in SRC_URI does not match
- the downloaded file, or a NoChecksumError if BB_STRICT_CHECKSUM is set and
- no checksums are specified.
- Returns a dict of checksums that can be stored in a done stamp file and
- passed in as precomputed parameter in a later call to avoid re-computing
- the checksums from the file. This allows verifying the checksums of the
- file against those in the recipe each time, rather than only after
- downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
- """
- if ud.ignore_checksums or not ud.method.supports_checksum(ud):
- return {}
- if localpath is None:
- localpath = ud.localpath
- def compute_checksum_info(checksum_id):
- checksum_name = getattr(ud, "%s_name" % checksum_id)
- if checksum_id in precomputed:
- checksum_data = precomputed[checksum_id]
- else:
- checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)
- checksum_expected = getattr(ud, "%s_expected" % checksum_id)
- if checksum_expected == '':
- checksum_expected = None
- return {
- "id": checksum_id,
- "name": checksum_name,
- "data": checksum_data,
- "expected": checksum_expected
- }
- checksum_infos = []
- for checksum_id in CHECKSUM_LIST:
- checksum_infos.append(compute_checksum_info(checksum_id))
- checksum_dict = {ci["id"] : ci["data"] for ci in checksum_infos}
- checksum_event = {"%ssum" % ci["id"] : ci["data"] for ci in checksum_infos}
- for ci in checksum_infos:
- if ci["id"] in SHOWN_CHECKSUM_LIST:
- checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
- # If no checksum has been provided
- if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
- messages = []
- strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
- # If strict checking is enabled and no checksum is defined, raise an error
- if strict == "1":
- raise NoChecksumError("\n".join(checksum_lines))
- bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
- if strict == "ignore":
- return checksum_dict
- # Log missing sums so user can more easily add them
- messages.append("Missing checksum for '%s', consider adding at " \
- "least one to the recipe:" % ud.localpath)
- messages.extend(checksum_lines)
- logger.warning("\n".join(messages))
- # We want to alert the user if a checksum is defined in the recipe but
- # it does not match.
- messages = []
- messages.append("Checksum mismatch!")
- bad_checksum = None
- for ci in checksum_infos:
- if ci["expected"] and ci["expected"] != ci["data"]:
- messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
- "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
- bad_checksum = ci["data"]
- if bad_checksum:
- messages.append("If this change is expected (e.g. you have upgraded " \
- "to a new version without updating the checksums) " \
- "then you can use these lines within the recipe:")
- messages.extend(checksum_lines)
- messages.append("Otherwise you should retry the download and/or " \
- "check with upstream to determine if the file has " \
- "become corrupted or otherwise unexpectedly modified.")
- raise ChecksumError("\n".join(messages), ud.url, bad_checksum)
- return checksum_dict
- def verify_donestamp(ud, d, origud=None):
- """
- Check whether the done stamp file has the right checksums (if the fetch
- method supports them). If it doesn't, delete the done stamp and force
- a re-download.
- Returns True, if the donestamp exists and is valid, False otherwise. When
- returning False, any existing done stamps are removed.
- """
- if not ud.needdonestamp or (origud and not origud.needdonestamp):
- return True
- if not os.path.exists(ud.localpath):
- # local path does not exist
- if os.path.exists(ud.donestamp):
- # done stamp exists, but the downloaded file does not; the done stamp
- # must be incorrect, re-trigger the download
- bb.utils.remove(ud.donestamp)
- return False
- if (not ud.method.supports_checksum(ud) or
- (origud and not origud.method.supports_checksum(origud))):
- # If the done stamp exists and checksums are not supported, assume the
- # local file is current
- return os.path.exists(ud.donestamp)
- precomputed_checksums = {}
- # Only re-use the precomputed checksums if the donestamp is newer than the
- # file. Do not rely on the mtime of directories, though. If ud.localpath is
- # a directory, there will probably not be any checksums anyway.
- if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
- os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
- try:
- with open(ud.donestamp, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- precomputed_checksums.update(pickled.load())
- except Exception as e:
- # Avoid the warnings on the upgrade path from empty done stamp
- # files to those containing the checksums.
- if not isinstance(e, EOFError):
- # Ignore errors, they aren't fatal
- logger.warning("Couldn't load checksums from donestamp %s: %s "
- "(msg: %s)" % (ud.donestamp, type(e).__name__,
- str(e)))
- try:
- checksums = verify_checksum(ud, d, precomputed_checksums)
- # If the cache file did not have the checksums, compute and store them
- # as an upgrade path from the previous done stamp file format.
- if checksums != precomputed_checksums:
- with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, 2)
- p.dump(checksums)
- return True
- except ChecksumError as e:
- # Checksums failed to verify, trigger re-download and remove the
- # incorrect stamp file.
- logger.warning("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- bb.utils.remove(ud.donestamp)
- return False
- def update_stamp(ud, d):
- """
- The donestamp is a stamp file indicating that the whole fetch is done.
- This function updates the stamp after verifying the checksum.
- """
- if not ud.needdonestamp:
- return
- if os.path.exists(ud.donestamp):
- # Touch the done stamp file to show active use of the download
- try:
- os.utime(ud.donestamp, None)
- except:
- # Errors aren't fatal here
- pass
- else:
- try:
- checksums = verify_checksum(ud, d)
- # Store the checksums for later re-verification against the recipe
- with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, 2)
- p.dump(checksums)
- except ChecksumError as e:
- # Checksums failed to verify, trigger re-download and remove the
- # incorrect stamp file.
- logger.warning("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- bb.utils.remove(ud.donestamp)
- raise
- def subprocess_setup():
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- # SIGPIPE errors are known issues with gzip/bash
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
- def mark_recipe_nocache(d):
- if d.getVar('BB_SRCREV_POLICY') != "cache":
- d.setVar('BB_DONT_CACHE', '1')
- def get_autorev(d):
- mark_recipe_nocache(d)
- d.setVar("__BBAUTOREV_SEEN", True)
- return "AUTOINC"
- def _get_srcrev(d, method_name='sortable_revision'):
- """
- Return the revision string, usually for use in the version string (PV) of the current package
- Most packages only have one SCM, so we just pass on the call.
- In the multi SCM case, we build a value based on SRCREV_FORMAT which must
- have been set.
- The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not
- incremental; other code is then responsible for turning that into an increasing value (if needed).
- A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
- that fetcher provides a method with the given name and the same signature as sortable_revision.
- """
- d.setVar("__BBSRCREV_SEEN", "1")
- recursion = d.getVar("__BBINSRCREV")
- if recursion:
- raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
- d.setVar("__BBINSRCREV", True)
- scms = []
- revs = []
- fetcher = Fetch(d.getVar('SRC_URI').split(), d)
- urldata = fetcher.ud
- for u in urldata:
- if urldata[u].method.supports_srcrev():
- scms.append(u)
- if not scms:
- d.delVar("__BBINSRCREV")
- return "", revs
- if len(scms) == 1:
- autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name)
- revs.append(rev)
- if len(rev) > 10:
- rev = rev[:10]
- d.delVar("__BBINSRCREV")
- if autoinc:
- return "AUTOINC+" + rev, revs
- return rev, revs
- #
- # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
- #
- format = d.getVar('SRCREV_FORMAT')
- if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
- "The SCMs are:\n%s" % '\n'.join(scms))
- name_to_rev = {}
- seenautoinc = False
- for scm in scms:
- ud = urldata[scm]
- autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name)
- revs.append(rev)
- seenautoinc = seenautoinc or autoinc
- if len(rev) > 10:
- rev = rev[:10]
- name_to_rev[ud.name] = rev
- # Replace names by revisions in the SRCREV_FORMAT string. The approach used
- # here can handle names being prefixes of other names and names appearing
- # as substrings in revisions (in which case the name should not be
- # expanded). The '|' regular expression operator tries matches from left to
- # right, so we need to sort the names with the longest ones first.
- names_descending_len = sorted(name_to_rev, key=len, reverse=True)
- name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
- format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)
- if seenautoinc:
- format = "AUTOINC+" + format
- d.delVar("__BBINSRCREV")
- return format, revs
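- # For instance, with two named SCMs in SRC_URI ("meta" and "app" here are
- # illustrative names), a recipe would set something like:
- #   SRCREV_FORMAT = "meta_app"
- # and each name in that string is replaced by the (possibly truncated)
- # revision of the matching SCM, prefixed with "AUTOINC+" when any revision
- # was not incremental.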
- def get_hashvalue(d, method_name='sortable_revision'):
- pkgv, revs = _get_srcrev(d, method_name=method_name)
- return " ".join(revs)
- def get_pkgv_string(d, method_name='sortable_revision'):
- pkgv, revs = _get_srcrev(d, method_name=method_name)
- return pkgv
- def get_srcrev(d, method_name='sortable_revision'):
- pkgv, revs = _get_srcrev(d, method_name=method_name)
- if not pkgv:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
- return pkgv
- def localpath(url, d):
- fetcher = bb.fetch2.Fetch([url], d)
- return fetcher.localpath(url)
- # Need to export PATH as the binary could be in metadata paths
- # rather than host-provided ones.
- # Also include some other variables.
- FETCH_EXPORT_VARS = ['HOME', 'PATH',
- 'HTTP_PROXY', 'http_proxy',
- 'HTTPS_PROXY', 'https_proxy',
- 'FTP_PROXY', 'ftp_proxy',
- 'FTPS_PROXY', 'ftps_proxy',
- 'NO_PROXY', 'no_proxy',
- 'ALL_PROXY', 'all_proxy',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSH',
- 'GIT_SSH_COMMAND',
- 'GIT_SSL_CAINFO',
- 'GIT_SMART_HTTP',
- 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
- 'SOCKS5_USER', 'SOCKS5_PASSWD',
- 'DBUS_SESSION_BUS_ADDRESS',
- 'P4CONFIG',
- 'SSL_CERT_FILE',
- 'NODE_EXTRA_CA_CERTS',
- 'AWS_PROFILE',
- 'AWS_ACCESS_KEY_ID',
- 'AWS_SECRET_ACCESS_KEY',
- 'AWS_ROLE_ARN',
- 'AWS_WEB_IDENTITY_TOKEN_FILE',
- 'AWS_DEFAULT_REGION',
- 'AWS_SESSION_TOKEN',
- 'GIT_CACHE_PATH',
- 'REMOTE_CONTAINERS_IPC',
- 'GITHUB_TOKEN',
- 'SSL_CERT_DIR']
- def get_fetcher_environment(d):
- newenv = {}
- origenv = d.getVar("BB_ORIGENV")
- for name in bb.fetch2.FETCH_EXPORT_VARS:
- value = d.getVar(name)
- if not value and origenv:
- value = origenv.getVar(name)
- if value:
- newenv[name] = value
- return newenv
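- # A minimal usage sketch (names are illustrative): pass the filtered
- # environment to an external helper process so proxies and credentials are
- # preserved:
- #   env = get_fetcher_environment(d)
- #   subprocess.run(["git", "ls-remote", repo_url], env=env, check=True)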
- def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
- """
- Run cmd returning the command output
- Raise an error if interrupted or cmd fails
- Optionally echo command output to stdout
- Optionally remove the files/directories listed in cleanup upon failure
- """
- exportvars = FETCH_EXPORT_VARS
- if not cleanup:
- cleanup = []
- # If PATH contains WORKDIR (which contains PV-PR, which contains SRCPV) we
- # can end up in circular recursion here, so give the option of breaking it
- # in a data store copy.
- try:
- d.getVar("PV")
- d.getVar("PR")
- except bb.data_smart.ExpansionError:
- d = bb.data.createCopy(d)
- d.setVar("PV", "fetcheravoidrecurse")
- d.setVar("PR", "fetcheravoidrecurse")
- origenv = d.getVar("BB_ORIGENV", False)
- for var in exportvars:
- val = d.getVar(var) or (origenv and origenv.getVar(var))
- if val:
- cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
- # Disable pseudo as it may affect ssh, potentially causing it to hang.
- cmd = 'export PSEUDO_DISABLED=1; ' + cmd
- if workdir:
- logger.debug("Running '%s' in %s" % (cmd, workdir))
- else:
- logger.debug("Running %s", cmd)
- success = False
- error_message = ""
- try:
- (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
- success = True
- except bb.process.NotFoundError as e:
- error_message = "Fetch command %s not found" % (e.command)
- except bb.process.ExecutionError as e:
- if e.stdout:
- output = "output:\n%s\n%s" % (e.stdout, e.stderr)
- elif e.stderr:
- output = "output:\n%s" % e.stderr
- else:
- if log:
- output = "see logfile for output"
- else:
- output = "no output"
- error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
- except bb.process.CmdError as e:
- error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
- if not success:
- for f in cleanup:
- try:
- bb.utils.remove(f, True)
- except OSError:
- pass
- raise FetchError(error_message)
- return output
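- # Typical call (illustrative command and paths): fetch into a temporary file
- # inside DL_DIR, removing the partial download if the command fails:
- #   runfetchcmd("wget -O %s %s" % (tmpfile, uri), d,
- #               cleanup=[tmpfile], workdir=dldir)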
- def check_network_access(d, info, url):
- """
- Log remote network access, and raise an error if BB_NO_NETWORK is set or
- the given URI is untrusted
- """
- if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
- raise NetworkAccess(url, info)
- elif not trusted_network(d, url):
- raise UntrustedUrl(url, info)
- else:
- logger.debug("Fetcher accessed the network with the command %s" % info)
- def build_mirroruris(origud, mirrors, ld):
- uris = []
- uds = []
- replacements = {}
- replacements["TYPE"] = origud.type
- replacements["HOST"] = origud.host
- replacements["PATH"] = origud.path
- replacements["BASENAME"] = origud.path.split("/")[-1]
- replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
- def adduri(ud, uris, uds, mirrors, tarballs):
- for line in mirrors:
- try:
- (find, replace) = line
- except ValueError:
- continue
- for tarball in tarballs:
- newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
- if not newuri or newuri in uris or newuri == origud.url:
- continue
- if not trusted_network(ld, newuri):
- logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
- continue
- # Create a local copy of the mirrors minus the current line.
- # This prevents us from recursively processing the same line,
- # as well as indirect recursion A -> B -> C -> A
- localmirrors = list(mirrors)
- localmirrors.remove(line)
- try:
- newud = FetchData(newuri, ld)
- newud.ignore_checksums = True
- newud.setup_localpath(ld)
- except bb.fetch2.BBFetchException as e:
- logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
- logger.debug(str(e))
- try:
- # setup_localpath of file:// urls may fail, we should still see
- # if mirrors of the url exist
- adduri(newud, uris, uds, localmirrors, tarballs)
- except UnboundLocalError:
- pass
- continue
- uris.append(newuri)
- uds.append(newud)
- adduri(newud, uris, uds, localmirrors, tarballs)
- adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])
- return uris, uds
- def rename_bad_checksum(ud, suffix):
- """
- Rename the downloaded file to carry the given suffix (the bad checksum)
- """
- if ud.localpath is None:
- return
- new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
- bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
- if not bb.utils.movefile(ud.localpath, new_localpath):
- bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, new_localpath))
- def try_mirror_url(fetch, origud, ud, ld, check = False):
- # Return of None or a value means we're finished
- # False means try another url
- if ud.lockfile and ud.lockfile != origud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
- try:
- if check:
- found = ud.method.checkstatus(fetch, ud, ld)
- if found:
- return found
- return False
- if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
- ud.method.download(ud, ld)
- if hasattr(ud.method,"build_mirror_data"):
- ud.method.build_mirror_data(ud, ld)
- if not ud.localpath or not os.path.exists(ud.localpath):
- return False
- if ud.localpath == origud.localpath:
- return ud.localpath
- # We may be obtaining a mirror tarball which needs further processing by the real fetcher
- # If that tarball is a local file:// we need to provide a symlink to it
- dldir = ld.getVar("DL_DIR")
- if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")):
- ld = ld.createCopy()
- ld.setVar("BB_NO_NETWORK", "1")
- if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
- # Create donestamp in old format to avoid triggering a re-download
- if ud.donestamp:
- bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
- open(ud.donestamp, 'w').close()
- dest = os.path.join(dldir, os.path.basename(ud.localpath))
- if not os.path.exists(dest):
- # In case this is executing without any file locks held (as is
- # the case for file:// URLs), two tasks may end up here at the
- # same time, in which case we do not want the second task to
- # fail when the link has already been created by the first task.
- try:
- os.symlink(ud.localpath, dest)
- except FileExistsError:
- pass
- if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
- origud.method.download(origud, ld)
- if hasattr(origud.method, "build_mirror_data"):
- origud.method.build_mirror_data(origud, ld)
- return origud.localpath
- # Otherwise the result is a local file:// and we symlink to it
- # This may also be a link to a shallow archive
- # When using shallow mode, add a symlink to the original fullshallow
- # path to ensure a valid symlink even in the `PREMIRRORS` case
- origud.method.update_mirror_links(ud, origud)
- update_stamp(origud, ld)
- return ud.localpath
- except bb.fetch2.NetworkAccess:
- raise
- except IOError as e:
- if e.errno in [errno.ESTALE]:
- logger.warning("Stale Error Observed %s." % ud.url)
- return False
- raise
- except bb.fetch2.BBFetchException as e:
- if isinstance(e, ChecksumError):
- logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
- logger.warning(str(e))
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- elif isinstance(e, NoChecksumError):
- raise
- else:
- logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
- logger.debug(str(e))
- try:
- if ud.method.cleanup_upon_failure():
- ud.method.clean(ud, ld)
- except UnboundLocalError:
- pass
- return False
- finally:
- if ud.lockfile and ud.lockfile != origud.lockfile:
- bb.utils.unlockfile(lf)
- def try_mirrors(fetch, d, origud, mirrors, check = False):
- """
- Try to use a mirrored version of the sources.
- This method will be automatically called before the fetchers go.
- d is a bb.data instance
- origud is the FetchData for the original uri we're trying to download
- mirrors is the list of mirrors we're going to try
- """
- ld = d.createCopy()
- uris, uds = build_mirroruris(origud, mirrors, ld)
- for index, uri in enumerate(uris):
- ret = try_mirror_url(fetch, origud, uds[index], ld, check)
- if ret:
- return ret
- return None
- def trusted_network(d, url):
- """
- Check whether the host of the given url is trusted, based on
- BB_ALLOWED_NETWORKS (set globally or for a specific recipe).
- Returns True if networking is disabled or no restriction is configured.
- """
- if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
- return True
- pkgname = d.getVar('PN')
- trusted_hosts = None
- if pkgname:
- trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
- if not trusted_hosts:
- trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
- # Not enabled.
- if not trusted_hosts:
- return True
- scheme, network, path, user, passwd, param = decodeurl(url)
- if not network:
- return True
- network = network.split(':')[0]
- network = network.lower()
- for host in trusted_hosts.split(" "):
- host = host.lower()
- if host.startswith("*.") and ("." + network).endswith(host[1:]):
- return True
- if host == network:
- return True
- return False
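- # Example (illustrative hosts): with
- #   BB_ALLOWED_NETWORKS = "*.yoctoproject.org git.example.com"
- # a url host of "downloads.yoctoproject.org" matches the wildcard entry,
- # "git.example.com" matches exactly, and any other host is rejected.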
- def srcrev_internal_helper(ud, d, name):
- """
- Return:
- a) a source revision if specified
- b) latest revision if SRCREV="AUTOINC"
- c) None if not specified
- """
- srcrev = None
- pn = d.getVar("PN")
- attempts = []
- if name != '' and pn:
- attempts.append("SRCREV_%s:pn-%s" % (name, pn))
- if name != '':
- attempts.append("SRCREV_%s" % name)
- if pn:
- attempts.append("SRCREV:pn-%s" % pn)
- attempts.append("SRCREV")
- for a in attempts:
- srcrev = d.getVar(a)
- if srcrev and srcrev != "INVALID":
- break
- if 'rev' in ud.parm:
- parmrev = ud.parm['rev']
- if srcrev == "INVALID" or not srcrev:
- return parmrev
- if srcrev != parmrev:
- raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
- return parmrev
- if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev):
- return ud.parm['tag']
- if srcrev == "INVALID" or not srcrev:
- raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
- if srcrev == "AUTOINC":
- d.setVar("__BBAUTOREV_ACTED_UPON", True)
- srcrev = ud.method.latest_revision(ud, d, name)
- return srcrev
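- # Lookup order for a url named "meta" in recipe "foo" (names illustrative),
- # from most to least specific:
- #   SRCREV_meta:pn-foo, SRCREV_meta, SRCREV:pn-foo, SRCREV
- # A ;rev= or ;tag= URL parameter is used when SRCREV is unset or "INVALID",
- # and "AUTOINC" resolves to the SCM's latest revision.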
- def get_checksum_file_list(d):
- """ Get a list of files checksum in SRC_URI
- Returns the resolved local paths of all local file entries in
- SRC_URI as a space-separated string
- """
- fetch = Fetch([], d, cache = False, localonly = True)
- filelist = []
- for u in fetch.urls:
- ud = fetch.ud[u]
- if ud and isinstance(ud.method, local.Local):
- found = False
- paths = ud.method.localfile_searchpaths(ud, d)
- for f in paths:
- pth = ud.path
- if os.path.exists(f):
- found = True
- filelist.append(f + ":" + str(os.path.exists(f)))
- if not found:
- bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
- "\nThe following paths were searched:"
- "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
- return " ".join(filelist)
- def get_file_checksums(filelist, pn, localdirsexclude):
- """Get a list of the checksums for a list of local files
- Returns the checksums for a list of local files, caching the results as
- it proceeds
- """
- return _checksum_cache.get_checksums(filelist, pn, localdirsexclude)
- class FetchData(object):
- """
- A class which represents the fetcher state for a given URI.
- """
- def __init__(self, url, d, localonly = False):
- # localpath is the location of a downloaded result. If not set, the file is local.
- self.donestamp = None
- self.needdonestamp = True
- self.localfile = ""
- self.localpath = None
- self.lockfile = None
- self.mirrortarballs = []
- self.basename = None
- self.basepath = None
- (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
- self.date = self.getSRCDate(d)
- self.url = url
- if not self.user and "user" in self.parm:
- self.user = self.parm["user"]
- if not self.pswd and "pswd" in self.parm:
- self.pswd = self.parm["pswd"]
- self.setup = False
- def configure_checksum(checksum_id):
- checksum_plain_name = "%ssum" % checksum_id
- if "name" in self.parm:
- checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
- else:
- checksum_name = checksum_plain_name
- if checksum_name in self.parm:
- checksum_expected = self.parm[checksum_name]
- elif checksum_plain_name in self.parm:
- checksum_expected = self.parm[checksum_plain_name]
- checksum_name = checksum_plain_name
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]:
- checksum_expected = None
- else:
- checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
- setattr(self, "%s_name" % checksum_id, checksum_name)
- setattr(self, "%s_expected" % checksum_id, checksum_expected)
- self.name = self.parm.get("name",'default')
- if "," in self.name:
- raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url)
- self.method = None
- for m in methods:
- if m.supports(self, d):
- self.method = m
- break
- if not self.method:
- raise NoMethodError(url)
- if localonly and not isinstance(self.method, local.Local):
- raise NonLocalMethod()
- if self.parm.get("proto", None) and "protocol" not in self.parm:
- logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
- self.parm["protocol"] = self.parm.get("proto", None)
- if hasattr(self.method, "urldata_init"):
- self.method.urldata_init(self, d)
- for checksum_id in CHECKSUM_LIST:
- configure_checksum(checksum_id)
- self.ignore_checksums = False
- if "localpath" in self.parm:
- # if user sets localpath for file, use it instead.
- self.localpath = self.parm["localpath"]
- self.basename = os.path.basename(self.localpath)
- elif self.localfile:
- self.localpath = self.method.localpath(self, d)
- dldir = d.getVar("DL_DIR")
- if not self.needdonestamp:
- return
- # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
- if self.localpath and self.localpath.startswith(dldir):
- basepath = self.localpath
- elif self.localpath:
- basepath = dldir + os.sep + os.path.basename(self.localpath)
- elif self.basepath or self.basename:
- basepath = dldir + os.sep + (self.basepath or self.basename)
- else:
- bb.fatal("Can't determine lock path for url %s" % url)
- self.donestamp = basepath + '.done'
- self.lockfile = basepath + '.lock'
- def setup_revisions(self, d):
- self.revision = srcrev_internal_helper(self, d, self.name)
- def setup_localpath(self, d):
- if not self.localpath:
- self.localpath = self.method.localpath(self, d)
- def getSRCDate(self, d):
- """
- Return the SRC Date for the component
- d the bb.data module
- """
- if "srcdate" in self.parm:
- return self.parm['srcdate']
- pn = d.getVar("PN")
- if pn:
- return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
- return d.getVar("SRCDATE") or d.getVar("DATE")
- class FetchMethod(object):
- """Base class for 'fetch'ing data"""
- def __init__(self, urls=None):
- self.urls = []
- def supports(self, urldata, d):
- """
- Check to see if this fetch class supports a given url.
- """
- return 0
- def localpath(self, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- Can also set up variables in urldata for use in download() (saving code
- duplication and duplicate code execution)
- """
- return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
- def supports_checksum(self, urldata):
- """
- Is localpath something that can be represented by a checksum?
- """
- # We cannot compute checksums for None
- if urldata.localpath is None:
- return False
- # We cannot compute checksums for directories
- if os.path.isdir(urldata.localpath):
- return False
- return True
- def recommends_checksum(self, urldata):
- """
- Is this a backend where checksumming is recommended (should warnings
- be displayed if there is no checksum)?
- """
- return False
- def cleanup_upon_failure(self):
- """
- When a fetch fails, should clean() be called?
- """
- return True
- def verify_donestamp(self, ud, d):
- """
- Verify the donestamp file
- """
- return verify_donestamp(ud, d)
- def update_donestamp(self, ud, d):
- """
- Update the donestamp file
- """
- update_stamp(ud, d)
- def _strip_leading_slashes(self, relpath):
- """
- Remove leading slash as os.path.join can't cope
- """
- while os.path.isabs(relpath):
- relpath = relpath[1:]
- return relpath
- def setUrls(self, urls):
- self.__urls = urls
- def getUrls(self):
- return self.__urls
- urls = property(getUrls, setUrls, None, "Urls property")
- def need_update(self, ud, d):
- """
- Force a fetch, even if localpath exists?
- """
- if os.path.exists(ud.localpath):
- return False
- return True
- def supports_srcrev(self):
- """
- The fetcher supports auto source revisions (SRCREV)
- """
- return False
- def download(self, urldata, d):
- """
- Fetch urls
- Assumes localpath was called first
- """
- raise NoMethodError(urldata.url)
- def unpack(self, urldata, rootdir, data):
- iterate = False
- file = urldata.localpath
- try:
- unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
- except ValueError as exc:
- bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
- (file, urldata.parm.get('unpack')))
- base, ext = os.path.splitext(file)
- if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']:
- efile = os.path.join(rootdir, os.path.basename(base))
- else:
- efile = file
- cmd = None
- if unpack:
- tar_cmd = 'tar --extract --no-same-owner'
- if 'striplevel' in urldata.parm:
- tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
- if file.endswith('.tar'):
- cmd = '%s -f %s' % (tar_cmd, file)
- elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = '%s -z -f %s' % (tar_cmd, file)
- elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
- elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
- cmd = 'gzip -dc %s > %s' % (file, efile)
- elif file.endswith('.bz2'):
- cmd = 'bzip2 -dc %s > %s' % (file, efile)
- elif file.endswith('.txz') or file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
- elif file.endswith('.xz'):
- cmd = 'xz -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.lz'):
- cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
- elif file.endswith('.lz'):
- cmd = 'lzip -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.7z'):
- cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
- elif file.endswith('.7z'):
- cmd = '7za x -y %s 1>/dev/null' % file
- elif file.endswith('.tzst') or file.endswith('.tar.zst'):
- cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
- elif file.endswith('.zst'):
- cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
- elif file.endswith('.zip') or file.endswith('.jar'):
- try:
- dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
- except ValueError as exc:
- bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
- (file, urldata.parm.get('dos')))
- cmd = 'unzip -q -o'
- if dos:
- cmd = '%s -a' % cmd
- cmd = "%s '%s'" % (cmd, file)
- elif file.endswith('.rpm') or file.endswith('.srpm'):
- if 'extract' in urldata.parm:
- unpack_file = urldata.parm.get('extract')
- cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
- iterate = True
- iterate_file = unpack_file
- else:
- cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
- elif file.endswith('.deb') or file.endswith('.ipk'):
- output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
- datafile = None
- if output:
- for line in output.decode().splitlines():
- if line.startswith('data.tar.') or line == 'data.tar':
- datafile = line
- break
- else:
- raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url)
- else:
- raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
- cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
- # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
- if 'subdir' in urldata.parm:
- subdir = urldata.parm.get('subdir')
- if os.path.isabs(subdir):
- if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
- raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
- unpackdir = subdir
- else:
- unpackdir = os.path.join(rootdir, subdir)
- bb.utils.mkdirhier(unpackdir)
- else:
- unpackdir = rootdir
- if not unpack or not cmd:
- urldata.unpack_tracer.unpack("file-copy", unpackdir)
- # If file == dest, then avoid any copies, as we already put the file into dest!
- dest = os.path.join(unpackdir, os.path.basename(file))
- if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
- destdir = '.'
- # For file:// entries all intermediate dirs in path must be created at destination
- if urldata.type == "file":
- # A trailing '/' causes the copy to go to the wrong place
- urlpath = urldata.path.rstrip('/')
- # Want files placed relative to cwd, so no leading '/'
- urlpath = urlpath.lstrip('/')
- if urlpath.find("/") != -1:
- destdir = urlpath.rsplit("/", 1)[0] + '/'
- bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
- else:
- urldata.unpack_tracer.unpack("archive-extract", unpackdir)
- if not cmd:
- return
- path = data.getVar('PATH')
- if path:
- cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, unpackdir))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
- if ret != 0:
- raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
- if iterate is True:
- iterate_urldata = urldata
- iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
- self.unpack(urldata, rootdir, data)
- return
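- # Illustrative (hypothetical) SRC_URI entries exercising the unpack parameters
- # handled above - 'striplevel' is passed to tar, 'subdir' redirects the unpack
- # destination, 'dos' adds unzip's text conversion, and 'unpack=0' falls back to
- # a plain file copy instead of extraction:
- #   SRC_URI = "https://example.com/foo-1.0.tar.gz;striplevel=1"
- #   SRC_URI = "https://example.com/foo-1.0.zip;dos=1;subdir=foo"
- #   SRC_URI = "https://example.com/foo-1.0.bin;unpack=0"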
- def clean(self, urldata, d):
- """
- Clean any existing full or partial download
- """
- bb.utils.remove(urldata.localpath)
- def ensure_symlink(self, target, link_name):
- if not os.path.exists(link_name):
- dirname = os.path.dirname(link_name)
- bb.utils.mkdirhier(dirname)
- if os.path.islink(link_name):
- # Broken symbolic link
- os.unlink(link_name)
- # In case this is executing without any file locks held (as is
- # the case for file:// URLs), two tasks may end up here at the
- # same time, in which case we do not want the second task to
- # fail when the link has already been created by the first task.
- try:
- os.symlink(target, link_name)
- except FileExistsError:
- pass
- def update_mirror_links(self, ud, origud):
- # For local file:// results, create a symlink to them
- # This may also be a link to a shallow archive
- self.ensure_symlink(ud.localpath, origud.localpath)
- def try_premirror(self, urldata, d):
- """
- Should premirrors be used?
- """
- return True
- def try_mirrors(self, fetch, urldata, d, mirrors, check=False):
- """
- Try to use a mirror
- """
- return bool(try_mirrors(fetch, d, urldata, mirrors, check))
- def checkstatus(self, fetch, urldata, d):
- """
- Check the status of a URL
- Assumes localpath was called first
- """
- logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
- return True
- def latest_revision(self, ud, d, name):
- """
- Look in the cache for the latest revision; if not present, ask the SCM.
- """
- if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
- key = self.generate_revision_key(ud, d, name)
- rev = _revisions_cache.get_rev(key)
- if rev is None:
- rev = self._latest_revision(ud, d, name)
- _revisions_cache.set_rev(key, rev)
- return rev
- def sortable_revision(self, ud, d, name):
- latest_rev = self._build_revision(ud, d, name)
- return True, str(latest_rev)
- def generate_revision_key(self, ud, d, name):
- return self._revision_key(ud, d, name)
- def latest_versionstring(self, ud, d):
- """
- Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
- by searching through the tags output of ls-remote, comparing
- versions and returning the highest match as a (version, revision) pair.
- """
- return ('', '')
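- # A concrete fetcher overriding this would typically translate an upstream tag
- # such as "v1.2.3" into the pair ("1.2.3", <revision>); the base implementation
- # simply reports that no upstream version information is available.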
- def done(self, ud, d):
- """
- Is the download done?
- """
- if os.path.exists(ud.localpath):
- return True
- return False
- def implicit_urldata(self, ud, d):
- """
- Get a list of FetchData objects for any implicit URLs that will also
- be downloaded when we fetch the given URL.
- """
- return []
- class DummyUnpackTracer(object):
- """
- Abstract API definition for a class that traces unpacked source files back
- to their respective upstream SRC_URI entries, for software composition
- analysis, license compliance and detailed SBOM generation purposes.
- Users may load their own unpack tracer class (instead of the dummy
- one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
- """
- def start(self, unpackdir, urldata_dict, d):
- """
- Start tracing the core Fetch.unpack process, using an index to map
- unpacked files to each SRC_URI entry.
- This method is called by Fetch.unpack and it may receive nested calls by
- gitsm and npmsw fetchers, which expand SRC_URI entries by adding implicit
- URLs and by recursively calling Fetch.unpack from new (nested) Fetch
- instances.
- """
- return
- def start_url(self, url):
- """Start tracing url unpack process.
- This method is called by Fetch.unpack before the fetcher-specific unpack
- method starts, and it may receive nested calls by gitsm and npmsw
- fetchers.
- """
- return
- def unpack(self, unpack_type, destdir):
- """
- Set unpack_type and destdir for current url.
- This method is called by the fetcher-specific unpack method after url
- tracing started.
- """
- return
- def finish_url(self, url):
- """Finish tracing url unpack process and update the file index.
- This method is called by Fetch.unpack after the fetcher-specific unpack
- method finished its job, and it may receive nested calls by gitsm
- and npmsw fetchers.
- """
- return
- def complete(self):
- """
- Finish tracing the Fetch.unpack process, and check if all nested
- Fetch.unpack calls (if any) have been completed; if so, save collected
- metadata.
- """
- return
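- # A minimal sketch of a user-provided tracer (module and class names here are
- # hypothetical); it is selected in configuration with e.g.
- #   BB_UNPACK_TRACER_CLASS = "mylayer.tracer.DictUnpackTracer"
- # and only needs to implement the methods defined by DummyUnpackTracer:
- #
- #   class DictUnpackTracer(DummyUnpackTracer):
- #       def start(self, unpackdir, urldata_dict, d):
- #           self.index = {}
- #           self.current_url = None
- #       def start_url(self, url):
- #           self.current_url = url
- #       def unpack(self, unpack_type, destdir):
- #           self.index.setdefault(self.current_url, []).append((unpack_type, destdir))
- #       def finish_url(self, url):
- #           self.current_url = None
- #       def complete(self):
- #           pass  # e.g. serialise self.index for SBOM generation here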
- class Fetch(object):
- def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
- if localonly and cache:
- raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at the same time")
- if not urls:
- urls = d.getVar("SRC_URI").split()
- self.urls = urls
- self.d = d
- self.ud = {}
- self.connection_cache = connection_cache
- fn = d.getVar('FILE')
- mc = d.getVar('__BBMULTICONFIG') or ""
- key = None
- if cache and fn:
- key = mc + fn + str(id(d))
- if key in urldata_cache:
- self.ud = urldata_cache[key]
- # the unpack_tracer object needs to be made available to possible nested
- # Fetch instances (when those are created by gitsm and npmsw fetchers)
- # so we set it as a global variable
- global unpack_tracer
- try:
- unpack_tracer
- except NameError:
- class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
- if class_path:
- # use user-defined unpack tracer class
- import importlib
- module_name, _, class_name = class_path.rpartition(".")
- module = importlib.import_module(module_name)
- class_ = getattr(module, class_name)
- unpack_tracer = class_()
- else:
- # fall back to the dummy/abstract class
- unpack_tracer = DummyUnpackTracer()
- for url in urls:
- if url not in self.ud:
- try:
- self.ud[url] = FetchData(url, d, localonly)
- self.ud[url].unpack_tracer = unpack_tracer
- except NonLocalMethod:
- if localonly:
- self.ud[url] = None
- pass
- if key:
- urldata_cache[key] = self.ud
- def localpath(self, url):
- if url not in self.urls:
- self.ud[url] = FetchData(url, self.d)
- self.ud[url].setup_localpath(self.d)
- return self.ud[url].localpath
- def localpaths(self):
- """
- Return a list of the local filenames, assuming successful fetch
- """
- local = []
- for u in self.urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- local.append(ud.localpath)
- return local
- def download(self, urls=None):
- """
- Fetch all urls
- """
- if not urls:
- urls = self.urls
- network = self.d.getVar("BB_NO_NETWORK")
- premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
- checksum_missing_messages = []
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- done = False
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
- try:
- self.d.setVar("BB_NO_NETWORK", network)
- if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
- done = True
- elif m.try_premirror(ud, self.d):
- logger.debug("Trying PREMIRRORS")
- mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
- done = m.try_mirrors(self, ud, self.d, mirrors)
- if done:
- try:
- # Early checksum verification so that, if the checksum of the premirror
- # contents mismatches, the fetcher can still try upstream and mirrors
- m.update_donestamp(ud, self.d)
- except ChecksumError as e:
- logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
- logger.debug(str(e))
- done = False
- d = self.d
- if premirroronly:
- # Only disable the network in a copy
- d = bb.data.createCopy(self.d)
- d.setVar("BB_NO_NETWORK", "1")
- firsterr = None
- verified_stamp = False
- if done:
- verified_stamp = m.verify_donestamp(ud, d)
- if not done and (not verified_stamp or m.need_update(ud, d)):
- try:
- if not trusted_network(d, ud.url):
- raise UntrustedUrl(ud.url)
- logger.debug("Trying Upstream")
- m.download(ud, d)
- if hasattr(m, "build_mirror_data"):
- m.build_mirror_data(ud, d)
- done = True
- # Early checksum verification, so that if the checksum mismatches,
- # the fetcher still has a chance to fetch from a mirror
- m.update_donestamp(ud, d)
- except bb.fetch2.NetworkAccess:
- raise
- except BBFetchException as e:
- if isinstance(e, ChecksumError):
- logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
- logger.debug(str(e))
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- elif isinstance(e, NoChecksumError):
- raise
- else:
- logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
- logger.debug(str(e))
- firsterr = e
- # Remove any incomplete fetch
- if not verified_stamp and m.cleanup_upon_failure():
- m.clean(ud, d)
- logger.debug("Trying MIRRORS")
- mirrors = mirror_from_string(d.getVar('MIRRORS'))
- done = m.try_mirrors(self, ud, d, mirrors)
- if not done or not m.done(ud, d):
- if firsterr:
- logger.error(str(firsterr))
- raise FetchError("Unable to fetch URL from any source.", u)
- m.update_donestamp(ud, d)
- except IOError as e:
- if e.errno in [errno.ESTALE]:
- logger.error("Stale Error Observed %s." % u)
- raise ChecksumError("Stale Error Detected")
- except BBFetchException as e:
- if isinstance(e, NoChecksumError):
- (message, _) = e.args
- checksum_missing_messages.append(message)
- continue
- elif isinstance(e, ChecksumError):
- logger.error("Checksum failure fetching %s" % u)
- raise
- finally:
- if ud.lockfile:
- bb.utils.unlockfile(lf)
- if checksum_missing_messages:
- logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages))
- raise BBFetchException("There was some missing checksums in the recipe")
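- # A minimal usage sketch of the download path (assuming 'd' is the task
- # datastore, as typically done from do_fetch/do_unpack implementations):
- #
- #   fetcher = bb.fetch2.Fetch(d.getVar("SRC_URI").split(), d)
- #   fetcher.download()
- #   fetcher.unpack(d.getVar("WORKDIR"))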
- def checkstatus(self, urls=None):
- """
- Check all URLs exist upstream.
- Returns None if the URLs exist; raises FetchError if the check was
- unsuccessful without an underlying error (such as the file not being
- found), and raises other exceptions in error cases.
- """
- if not urls:
- urls = self.urls
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- logger.debug("Testing URL %s", u)
- # First try checking uri, u, from PREMIRRORS
- mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
- ret = m.try_mirrors(self, ud, self.d, mirrors, True)
- if not ret:
- # Next try checking from the original uri, u
- ret = m.checkstatus(self, ud, self.d)
- if not ret:
- # Finally, try checking uri, u, from MIRRORS
- mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
- ret = m.try_mirrors(self, ud, self.d, mirrors, True)
- if not ret:
- raise FetchError("URL doesn't work", u)
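- # Sketch of a caller relying on the raise-on-failure contract above, e.g. for
- # an upstream availability check:
- #
- #   try:
- #       bb.fetch2.Fetch(urls, d).checkstatus()
- #   except bb.fetch2.FetchError as e:
- #       bb.warn("Upstream URI could not be reached: %s" % e)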
- def unpack(self, root, urls=None):
- """
- Unpack urls to root
- """
- if not urls:
- urls = self.urls
- unpack_tracer.start(root, self.ud, self.d)
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
- unpack_tracer.start_url(u)
- ud.method.unpack(ud, root, self.d)
- unpack_tracer.finish_url(u)
- if ud.lockfile:
- bb.utils.unlockfile(lf)
- unpack_tracer.complete()
- def clean(self, urls=None):
- """
- Clean files that the fetcher gets or places
- """
- if not urls:
- urls = self.urls
- for url in urls:
- if url not in self.ud:
- self.ud[url] = FetchData(url, self.d)
- ud = self.ud[url]
- ud.setup_localpath(self.d)
- if not ud.localfile and ud.localpath is None:
- continue
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
- ud.method.clean(ud, self.d)
- if ud.donestamp:
- bb.utils.remove(ud.donestamp)
- if ud.lockfile:
- bb.utils.unlockfile(lf)
- def expanded_urldata(self, urls=None):
- """
- Get an expanded list of FetchData objects covering both the given
- URLS and any additional implicit URLs that are added automatically by
- the appropriate FetchMethod.
- """
- if not urls:
- urls = self.urls
- urldata = []
- for url in urls:
- ud = self.ud[url]
- urldata.append(ud)
- urldata += ud.method.implicit_urldata(ud, self.d)
- return urldata
- class FetchConnectionCache(object):
- """
- A class which represents a container for socket connections.
- """
- def __init__(self):
- self.cache = {}
- def get_connection_name(self, host, port):
- return host + ':' + str(port)
- def add_connection(self, host, port, connection):
- cn = self.get_connection_name(host, port)
- if cn not in self.cache:
- self.cache[cn] = connection
- def get_connection(self, host, port):
- connection = None
- cn = self.get_connection_name(host, port)
- if cn in self.cache:
- connection = self.cache[cn]
- return connection
- def remove_connection(self, host, port):
- cn = self.get_connection_name(host, port)
- if cn in self.cache:
- self.cache[cn].close()
- del self.cache[cn]
- def close_connections(self):
- for cn in list(self.cache.keys()):
- self.cache[cn].close()
- del self.cache[cn]
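- # Usage sketch (the connection object type is up to the caller; make_connection
- # is a hypothetical helper):
- #
- #   cache = FetchConnectionCache()
- #   conn = cache.get_connection(host, port)
- #   if conn is None:
- #       conn = make_connection(host, port)
- #       cache.add_connection(host, port, conn)
- #   ...
- #   cache.close_connections()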
- from . import cvs
- from . import git
- from . import gitsm
- from . import gitannex
- from . import local
- from . import svn
- from . import wget
- from . import ssh
- from . import sftp
- from . import s3
- from . import perforce
- from . import bzr
- from . import hg
- from . import osc
- from . import repo
- from . import clearcase
- from . import npm
- from . import npmsw
- from . import az
- from . import crate
- from . import gcp
- from . import gomod
- methods.append(local.Local())
- methods.append(wget.Wget())
- methods.append(svn.Svn())
- methods.append(git.Git())
- methods.append(gitsm.GitSM())
- methods.append(gitannex.GitANNEX())
- methods.append(cvs.Cvs())
- methods.append(ssh.SSH())
- methods.append(sftp.SFTP())
- methods.append(s3.S3())
- methods.append(perforce.Perforce())
- methods.append(bzr.Bzr())
- methods.append(hg.Hg())
- methods.append(osc.Osc())
- methods.append(repo.Repo())
- methods.append(clearcase.ClearCase())
- methods.append(npm.Npm())
- methods.append(npmsw.NpmShrinkWrap())
- methods.append(az.Az())
- methods.append(crate.Crate())
- methods.append(gcp.GCP())
- methods.append(gomod.GoMod())
- methods.append(gomod.GoModGit())
|