#!/usr/bin/env python
# Copyright (C) 2009 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import argparse
import datetime
import fnmatch
import os
from collections import defaultdict
import re
import subprocess
import requests  # URL checking
import json
import ijson
import certifi
import distutils.version
import time
import gzip
import sys
from urllib3 import HTTPSConnectionPool
from urllib3.exceptions import HTTPError
from multiprocessing import Pool

sys.path.append('utils/')
from getdeveloperlib import parse_developers  # noqa: E402

NVD_START_YEAR = 2002
NVD_JSON_VERSION = "1.0"
NVD_BASE_URL = "https://nvd.nist.gov/feeds/json/cve/" + NVD_JSON_VERSION

INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)")
URL_RE = re.compile(r"\s*https?://\S*\s*$")

RM_API_STATUS_ERROR = 1
RM_API_STATUS_FOUND_BY_DISTRO = 2
RM_API_STATUS_FOUND_BY_PATTERN = 3
RM_API_STATUS_NOT_FOUND = 4

CVE_AFFECTS = 1
CVE_DOESNT_AFFECT = 2
CVE_UNKNOWN = 3

# Used to make multiple requests to the same host. It is global
# because it's used by sub-processes.
http_pool = None


class Defconfig:
    def __init__(self, name, path):
        self.name = name
        self.path = path
        self.developers = None

    def set_developers(self, developers):
        """
        Fills in the .developers field
        """
        self.developers = [
            developer.name
            for developer in developers
            if developer.hasfile(self.path)
        ]


def get_defconfig_list():
    """
    Builds the list of Buildroot defconfigs, returning a list of Defconfig
    objects.
    """
    return [
        Defconfig(name[:-len('_defconfig')], os.path.join('configs', name))
        for name in os.listdir('configs')
        if name.endswith('_defconfig')
    ]


class Package:
    all_licenses = dict()
    all_license_files = list()
    all_versions = dict()
    all_ignored_cves = dict()
    # This is the list of all possible checks. Add new checks to this list so
    # a tool that post-processes the json output knows the checks before
    # iterating over the packages.
    status_checks = ['cve', 'developers', 'hash', 'license',
                     'license-files', 'patches', 'pkg-check', 'url', 'version']

    def __init__(self, name, path):
        self.name = name
        self.path = path
        self.pkg_path = os.path.dirname(path)
        self.infras = None
        self.license = None
        self.has_license = False
        self.has_license_files = False
        self.has_hash = False
        self.patch_files = []
        self.warnings = 0
        self.current_version = None
        self.url = None
        self.url_worker = None
        self.cves = list()
        self.latest_version = {'status': RM_API_STATUS_ERROR, 'version': None, 'id': None}
        self.status = {}

    def pkgvar(self):
        return self.name.upper().replace("-", "_")

    def set_url(self):
        """
        Fills in the .url field
        """
        self.status['url'] = ("warning", "no Config.in")
        for filename in os.listdir(os.path.dirname(self.path)):
            if fnmatch.fnmatch(filename, 'Config.*'):
                fp = open(os.path.join(os.path.dirname(self.path), filename), "r")
                for config_line in fp:
                    if URL_RE.match(config_line):
                        self.url = config_line.strip()
                        self.status['url'] = ("ok", "found")
                        fp.close()
                        return
                self.status['url'] = ("error", "missing")
                fp.close()

    @property
    def patch_count(self):
        return len(self.patch_files)

    @property
    def has_valid_infra(self):
        try:
            if self.infras[0][1] == 'virtual':
                return False
        except IndexError:
            return False
        return True

    def set_infra(self):
        """
        Fills in the .infras field
        """
        self.infras = list()
        with open(self.path, 'r') as f:
            lines = f.readlines()
            for l in lines:
                match = INFRA_RE.match(l)
                if not match:
                    continue
                infra = match.group(1)
                if infra.startswith("host-"):
                    self.infras.append(("host", infra[5:]))
                else:
                    self.infras.append(("target", infra))

    def set_license(self):
        """
        Fills in the .status['license'] and .status['license-files'] fields
        """
        if not self.has_valid_infra:
            self.status['license'] = ("na", "no valid package infra")
            self.status['license-files'] = ("na", "no valid package infra")
            return

        var = self.pkgvar()
        self.status['license'] = ("error", "missing")
        self.status['license-files'] = ("error", "missing")

        if var in self.all_licenses:
            self.license = self.all_licenses[var]
            self.status['license'] = ("ok", "found")
        if var in self.all_license_files:
            self.status['license-files'] = ("ok", "found")

    def set_hash_info(self):
        """
        Fills in the .status['hash'] field
        """
        if not self.has_valid_infra:
            self.status['hash'] = ("na", "no valid package infra")
            self.status['hash-license'] = ("na", "no valid package infra")
            return

        hashpath = self.path.replace(".mk", ".hash")
        if os.path.exists(hashpath):
            self.status['hash'] = ("ok", "found")
        else:
            self.status['hash'] = ("error", "missing")

    def set_patch_count(self):
        """
        Fills in the .patch_count, .patch_files and .status['patches'] fields
        """
        if not self.has_valid_infra:
            self.status['patches'] = ("na", "no valid package infra")
            return

        pkgdir = os.path.dirname(self.path)
        for subdir, _, _ in os.walk(pkgdir):
            self.patch_files = fnmatch.filter(os.listdir(subdir), '*.patch')

        if self.patch_count == 0:
            self.status['patches'] = ("ok", "no patches")
        elif self.patch_count < 5:
            self.status['patches'] = ("warning", "some patches")
        else:
            self.status['patches'] = ("error", "lots of patches")

    def set_current_version(self):
        """
        Fills in the .current_version field
        """
        var = self.pkgvar()
        if var in self.all_versions:
            self.current_version = self.all_versions[var]

    def set_check_package_warnings(self):
        """
        Fills in the .warnings and .status['pkg-check'] fields
        """
        cmd = ["./utils/check-package"]
        pkgdir = os.path.dirname(self.path)
        self.status['pkg-check'] = ("error", "Missing")
        for root, dirs, files in os.walk(pkgdir):
            for f in files:
                if f.endswith(".mk") or f.endswith(".hash") or f == "Config.in" or f == "Config.in.host":
                    cmd.append(os.path.join(root, f))
        o = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[1]
        lines = o.splitlines()
        for line in lines:
            m = re.match("^([0-9]*) warnings generated", line.decode())
            if m:
                self.warnings = int(m.group(1))
                if self.warnings == 0:
                    self.status['pkg-check'] = ("ok", "no warnings")
                else:
                    self.status['pkg-check'] = ("error", "{} warnings".format(self.warnings))
                return

    def is_cve_ignored(self, cve):
        """
        Tells if the CVE is ignored by the package
        """
        return cve in self.all_ignored_cves.get(self.pkgvar(), [])

    def set_developers(self, developers):
        """
        Fills in the .developers and .status['developers'] fields
        """
        self.developers = [
            dev.name
            for dev in developers
            if dev.hasfile(self.path)
        ]

        if self.developers:
            self.status['developers'] = ("ok", "{} developers".format(len(self.developers)))
        else:
            self.status['developers'] = ("warning", "no developers")

    def is_status_ok(self, name):
        return self.status[name][0] == 'ok'

    def __eq__(self, other):
        return self.path == other.path

    def __lt__(self, other):
        return self.path < other.path

    def __str__(self):
        return "%s (path='%s', license='%s', license_files='%s', hash='%s', patches=%d)" % \
            (self.name, self.path, self.is_status_ok('license'),
             self.is_status_ok('license-files'), self.status['hash'], self.patch_count)


class CVE:
    """An accessor class for CVE Items in NVD files"""
    def __init__(self, nvd_cve):
        """Initialize a CVE from its NVD JSON representation"""
        self.nvd_cve = nvd_cve
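
    # download_nvd_year() fetches the per-year NVD JSON feed (and its .meta
    # companion) into nvd_path, re-downloading only when the local copy is
    # older than a day or the upstream metadata has changed.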
    @staticmethod
    def download_nvd_year(nvd_path, year):
        metaf = "nvdcve-%s-%s.meta" % (NVD_JSON_VERSION, year)
        path_metaf = os.path.join(nvd_path, metaf)
        jsonf_gz = "nvdcve-%s-%s.json.gz" % (NVD_JSON_VERSION, year)
        path_jsonf_gz = os.path.join(nvd_path, jsonf_gz)

        # If the database file is less than a day old, we assume the NVD data
        # locally available is recent enough.
        if os.path.exists(path_jsonf_gz) and os.stat(path_jsonf_gz).st_mtime >= time.time() - 86400:
            return path_jsonf_gz

        # If not, we download the meta file
        url = "%s/%s" % (NVD_BASE_URL, metaf)
        print("Getting %s" % url)
        page_meta = requests.get(url)
        page_meta.raise_for_status()

        # If the meta file already existed, we compare the existing
        # one with the data newly downloaded. If they are different,
        # we need to re-download the database.
        # If the database does not exist locally, we need to redownload it in
        # any case.
        if os.path.exists(path_metaf) and os.path.exists(path_jsonf_gz):
            meta_known = open(path_metaf, "r").read()
            if page_meta.text == meta_known:
                return path_jsonf_gz

        # Grab the compressed JSON NVD, and write files to disk
        url = "%s/%s" % (NVD_BASE_URL, jsonf_gz)
        print("Getting %s" % url)
        page_json = requests.get(url)
        page_json.raise_for_status()
        open(path_jsonf_gz, "wb").write(page_json.content)
        open(path_metaf, "w").write(page_meta.text)
        return path_jsonf_gz

    @classmethod
    def read_nvd_dir(cls, nvd_dir):
        """
        Iterate over all the CVEs contained in NIST Vulnerability Database
        feeds since NVD_START_YEAR. If the files are missing or outdated in
        nvd_dir, a fresh copy will be downloaded, and kept in .json.gz
        """
        for year in range(NVD_START_YEAR, datetime.datetime.now().year + 1):
            filename = CVE.download_nvd_year(nvd_dir, year)
            try:
                content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
            except:  # noqa: E722
                print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
                raise
            for cve in content:
                yield cls(cve['cve'])

    def each_product(self):
        """Iterate over each product section of this cve"""
        for vendor in self.nvd_cve['affects']['vendor']['vendor_data']:
            for product in vendor['product']['product_data']:
                yield product

    @property
    def identifier(self):
        """The CVE unique identifier"""
        return self.nvd_cve['CVE_data_meta']['ID']

    @property
    def pkg_names(self):
        """The set of package names referred by this CVE definition"""
        return set(p['product_name'] for p in self.each_product())

    def affects(self, br_pkg):
        """
        True if the Buildroot Package object passed as argument is affected
        by this CVE.
        """
        if br_pkg.is_cve_ignored(self.identifier):
            return CVE_DOESNT_AFFECT

        for product in self.each_product():
            if product['product_name'] != br_pkg.name:
                continue
            for v in product['version']['version_data']:
                if v["version_affected"] == "=":
                    if br_pkg.current_version == v["version_value"]:
                        return CVE_AFFECTS
                elif v["version_affected"] == "<=":
                    pkg_version = distutils.version.LooseVersion(br_pkg.current_version)
                    if not hasattr(pkg_version, "version"):
                        print("Cannot parse package '%s' version '%s'" % (br_pkg.name, br_pkg.current_version))
                        continue
                    cve_affected_version = distutils.version.LooseVersion(v["version_value"])
                    if not hasattr(cve_affected_version, "version"):
                        print("Cannot parse CVE affected version '%s'" % v["version_value"])
                        continue
                    try:
                        affected = pkg_version <= cve_affected_version
                    except TypeError:
                        return CVE_UNKNOWN
                    if affected:
                        return CVE_AFFECTS
                    else:
                        return CVE_DOESNT_AFFECT
                else:
                    print("version_affected: %s" % v['version_affected'])
        return CVE_DOESNT_AFFECT


def get_pkglist(npackages, package_list):
    """
    Builds the list of Buildroot packages, returning a list of Package
    objects. Only the .name and .path fields of the Package object are
    initialized.

    npackages: limit to N packages
    package_list: limit to those packages in this list
    """
    WALK_USEFUL_SUBDIRS = ["boot", "linux", "package", "toolchain"]
    WALK_EXCLUDES = ["boot/common.mk",
                     "linux/linux-ext-.*.mk",
                     "package/freescale-imx/freescale-imx.mk",
                     "package/gcc/gcc.mk",
                     "package/gstreamer/gstreamer.mk",
                     "package/gstreamer1/gstreamer1.mk",
                     "package/gtk2-themes/gtk2-themes.mk",
                     "package/matchbox/matchbox.mk",
                     "package/opengl/opengl.mk",
                     "package/qt5/qt5.mk",
                     "package/x11r7/x11r7.mk",
                     "package/doc-asciidoc.mk",
                     "package/pkg-.*.mk",
                     "package/nvidia-tegra23/nvidia-tegra23.mk",
                     "toolchain/toolchain-external/pkg-toolchain-external.mk",
                     "toolchain/toolchain-external/toolchain-external.mk",
                     "toolchain/toolchain.mk",
                     "toolchain/helpers.mk",
                     "toolchain/toolchain-wrapper.mk"]
    packages = list()
    count = 0
    for root, dirs, files in os.walk("."):
        rootdir = root.split("/")
        if len(rootdir) < 2:
            continue
        if rootdir[1] not in WALK_USEFUL_SUBDIRS:
            continue
        for f in files:
            if not f.endswith(".mk"):
                continue
            # Strip ending ".mk"
            pkgname = f[:-3]
            if package_list and pkgname not in package_list:
                continue
            pkgpath = os.path.join(root, f)
            skip = False
            for exclude in WALK_EXCLUDES:
                # pkgpath[2:] strips the initial './'
                if re.match(exclude, pkgpath[2:]):
                    skip = True
                    continue
            if skip:
                continue
            p = Package(pkgname, pkgpath)
            packages.append(p)
            count += 1
            if npackages and count == npackages:
                return packages
    return packages
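

# Populate the Package class-level dictionaries (all_licenses,
# all_license_files, all_versions, all_ignored_cves) by running
# "make printvars" once and parsing its VARIABLE=value output.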
def package_init_make_info():
    # Fetch all variables at once
    variables = subprocess.check_output(["make", "BR2_HAVE_DOT_CONFIG=y", "-s", "printvars",
                                         "VARS=%_LICENSE %_LICENSE_FILES %_VERSION %_IGNORE_CVES"])
    variable_list = variables.decode().splitlines()

    # We process first the host package VERSION, and then the target
    # package VERSION. This means that if a package exists in both
    # target and host variants, with different values (eg. version
    # numbers (unlikely)), we'll report the target one.
    variable_list = [x[5:] for x in variable_list if x.startswith("HOST_")] + \
                    [x for x in variable_list if not x.startswith("HOST_")]

    for l in variable_list:
        # Get variable name and value
        pkgvar, value = l.split("=")

        # Strip the suffix according to the variable
        if pkgvar.endswith("_LICENSE"):
            # If value is "unknown", no license details available
            if value == "unknown":
                continue
            pkgvar = pkgvar[:-8]
            Package.all_licenses[pkgvar] = value
        elif pkgvar.endswith("_LICENSE_FILES"):
            if pkgvar.endswith("_MANIFEST_LICENSE_FILES"):
                continue
            pkgvar = pkgvar[:-14]
            Package.all_license_files.append(pkgvar)
        elif pkgvar.endswith("_VERSION"):
            if pkgvar.endswith("_DL_VERSION"):
                continue
            pkgvar = pkgvar[:-8]
            Package.all_versions[pkgvar] = value
        elif pkgvar.endswith("_IGNORE_CVES"):
            pkgvar = pkgvar[:-12]
            Package.all_ignored_cves[pkgvar] = value.split()
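

# Worker run in a multiprocessing.Pool: given a package URL and its current
# ("status", "reason") tuple, probe the URL with a HEAD request and return an
# updated tuple. URLs whose status is not "ok" are passed through unchanged.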
def check_url_status_worker(url, url_status):
    if url_status[0] == 'ok':
        try:
            url_status_code = requests.head(url, timeout=30).status_code
            if url_status_code >= 400:
                return ("error", "invalid {}".format(url_status_code))
        except requests.exceptions.RequestException:
            return ("error", "invalid (err)")
        return ("ok", "valid")
    return url_status
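

# Check all package URLs in parallel, then collect the results back into each
# package's .status['url'] field.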
def check_package_urls(packages):
    pool = Pool(processes=64)
    for pkg in packages:
        pkg.url_worker = pool.apply_async(check_url_status_worker, (pkg.url, pkg.status['url']))
    for pkg in packages:
        pkg.status['url'] = pkg.url_worker.get(timeout=3600)
        del pkg.url_worker
    pool.terminate()
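

# Query release-monitoring.org for the latest upstream version of a package,
# using the explicit Buildroot distribution mapping. Returns a
# (status, version, id) tuple.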
def release_monitoring_get_latest_version_by_distro(pool, name):
    try:
        req = pool.request('GET', "/api/project/Buildroot/%s" % name)
    except HTTPError:
        return (RM_API_STATUS_ERROR, None, None)

    if req.status != 200:
        return (RM_API_STATUS_NOT_FOUND, None, None)

    data = json.loads(req.data)

    if 'version' in data:
        return (RM_API_STATUS_FOUND_BY_DISTRO, data['version'], data['id'])
    else:
        return (RM_API_STATUS_FOUND_BY_DISTRO, None, data['id'])
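

# Fall back to a pattern search on release-monitoring.org when no distribution
# mapping exists: take the lowest-id project whose name matches exactly and
# that advertises a version.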
def release_monitoring_get_latest_version_by_guess(pool, name):
    try:
        req = pool.request('GET', "/api/projects/?pattern=%s" % name)
    except HTTPError:
        return (RM_API_STATUS_ERROR, None, None)

    if req.status != 200:
        return (RM_API_STATUS_NOT_FOUND, None, None)

    data = json.loads(req.data)
    projects = data['projects']
    projects.sort(key=lambda x: x['id'])

    for p in projects:
        if p['name'] == name and 'version' in p:
            return (RM_API_STATUS_FOUND_BY_PATTERN, p['version'], p['id'])

    return (RM_API_STATUS_NOT_FOUND, None, None)


def check_package_latest_version_worker(name):
    """Wrapper to try both by name then by guess"""
    print(name)
    res = release_monitoring_get_latest_version_by_distro(http_pool, name)
    if res[0] == RM_API_STATUS_NOT_FOUND:
        res = release_monitoring_get_latest_version_by_guess(http_pool, name)
    return res


def check_package_latest_version(packages):
    """
    Fills in the .latest_version field of all Package objects

    This field is a dict and has the following keys:

    - status: one of RM_API_STATUS_ERROR,
      RM_API_STATUS_FOUND_BY_DISTRO, RM_API_STATUS_FOUND_BY_PATTERN,
      RM_API_STATUS_NOT_FOUND
    - version: string containing the latest version known by
      release-monitoring.org for this package
    - id: string containing the id of the project corresponding to this
      package, as known by release-monitoring.org
    """
    global http_pool
    http_pool = HTTPSConnectionPool('release-monitoring.org', port=443,
                                    cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(),
                                    timeout=30)
    worker_pool = Pool(processes=64)
    results = worker_pool.map(check_package_latest_version_worker, (pkg.name for pkg in packages))
    for pkg, r in zip(packages, results):
        pkg.latest_version = dict(zip(['status', 'version', 'id'], r))

        if not pkg.has_valid_infra:
            pkg.status['version'] = ("na", "no valid package infra")
            continue

        if pkg.latest_version['status'] == RM_API_STATUS_ERROR:
            pkg.status['version'] = ('warning', "Release Monitoring API error")
        elif pkg.latest_version['status'] == RM_API_STATUS_NOT_FOUND:
            pkg.status['version'] = ('warning', "Package not found on Release Monitoring")

        if pkg.latest_version['version'] is None:
            pkg.status['version'] = ('warning', "No upstream version available on Release Monitoring")
        elif pkg.latest_version['version'] != pkg.current_version:
            pkg.status['version'] = ('error', "The newer version {} is available upstream".format(pkg.latest_version['version']))
        else:
            pkg.status['version'] = ('ok', 'up-to-date')

    worker_pool.terminate()
    del http_pool
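

# Match the NVD CVE feed entries against the Buildroot packages: a CVE is
# recorded on a package when the product name matches and the package's
# current version is affected.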
def check_package_cves(nvd_path, packages):
    if not os.path.isdir(nvd_path):
        os.makedirs(nvd_path)

    for cve in CVE.read_nvd_dir(nvd_path):
        for pkg_name in cve.pkg_names:
            if pkg_name in packages and cve.affects(packages[pkg_name]) == CVE_AFFECTS:
                packages[pkg_name].cves.append(cve.identifier)
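

# Aggregate per-package information into global counters: per-infrastructure
# totals, license/hash coverage, release-monitoring.org mapping, version
# freshness, patch and CVE counts.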
def calculate_stats(packages):
    stats = defaultdict(int)
    stats['packages'] = len(packages)
    for pkg in packages:
        # If packages have multiple infra, take the first one. For the
        # vast majority of packages, the target and host infra are the
        # same. There are very few packages that use a different infra
        # for the host and target variants.
        if len(pkg.infras) > 0:
            infra = pkg.infras[0][1]
            stats["infra-%s" % infra] += 1
        else:
            stats["infra-unknown"] += 1
        if pkg.is_status_ok('license'):
            stats["license"] += 1
        else:
            stats["no-license"] += 1
        if pkg.is_status_ok('license-files'):
            stats["license-files"] += 1
        else:
            stats["no-license-files"] += 1
        if pkg.is_status_ok('hash'):
            stats["hash"] += 1
        else:
            stats["no-hash"] += 1
        if pkg.latest_version['status'] == RM_API_STATUS_FOUND_BY_DISTRO:
            stats["rmo-mapping"] += 1
        else:
            stats["rmo-no-mapping"] += 1
        if not pkg.latest_version['version']:
            stats["version-unknown"] += 1
        elif pkg.latest_version['version'] == pkg.current_version:
            stats["version-uptodate"] += 1
        else:
            stats["version-not-uptodate"] += 1
        stats["patches"] += pkg.patch_count
        stats["total-cves"] += len(pkg.cves)
        if len(pkg.cves) != 0:
            stats["pkg-cves"] += 1
    return stats


html_header = """
<head>
<script src=\"https://www.kryogenix.org/code/browser/sorttable/sorttable.js\"></script>
<style type=\"text/css\">
table {
  width: 100%;
}
td {
  border: 1px solid black;
}
td.centered {
  text-align: center;
}
td.wrong {
  background: #ff9a69;
}
td.correct {
  background: #d2ffc4;
}
td.nopatches {
  background: #d2ffc4;
}
td.somepatches {
  background: #ffd870;
}
td.lotsofpatches {
  background: #ff9a69;
}
td.good_url {
  background: #d2ffc4;
}
td.missing_url {
  background: #ffd870;
}
td.invalid_url {
  background: #ff9a69;
}
td.version-good {
  background: #d2ffc4;
}
td.version-needs-update {
  background: #ff9a69;
}
td.version-unknown {
  background: #ffd870;
}
td.version-error {
  background: #ccc;
}
</style>
<title>Statistics of Buildroot packages</title>
</head>

<a href=\"#results\">Results</a><br/>

<p id=\"sortable_hint\"></p>
"""

html_footer = """
</body>
<script>
if (typeof sorttable === \"object\") {
    document.getElementById(\"sortable_hint\").innerHTML =
    \"hint: the table can be sorted by clicking the column headers\"
}
</script>
</html>
"""


def infra_str(infra_list):
    if not infra_list:
        return "Unknown"
    elif len(infra_list) == 1:
        return "<b>%s</b><br/>%s" % (infra_list[0][1], infra_list[0][0])
    elif infra_list[0][1] == infra_list[1][1]:
        return "<b>%s</b><br/>%s + %s" % \
            (infra_list[0][1], infra_list[0][0], infra_list[1][0])
    else:
        return "<b>%s</b> (%s)<br/><b>%s</b> (%s)" % \
            (infra_list[0][1], infra_list[0][0],
             infra_list[1][1], infra_list[1][0])


def boolean_str(b):
    if b:
        return "Yes"
    else:
        return "No"
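

# Emit one HTML table row for a package: patch count, infrastructure, license
# and hash status, current and latest versions, check-package warnings,
# upstream URL status and the list of CVEs.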
def dump_html_pkg(f, pkg):
    f.write(" <tr>\n")
    f.write(" <td>%s</td>\n" % pkg.path[2:])

    # Patch count
    td_class = ["centered"]
    if pkg.patch_count == 0:
        td_class.append("nopatches")
    elif pkg.patch_count < 5:
        td_class.append("somepatches")
    else:
        td_class.append("lotsofpatches")
    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), str(pkg.patch_count)))

    # Infrastructure
    infra = infra_str(pkg.infras)
    td_class = ["centered"]
    if infra == "Unknown":
        td_class.append("wrong")
    else:
        td_class.append("correct")
    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), infra_str(pkg.infras)))

    # License
    td_class = ["centered"]
    if pkg.is_status_ok('license'):
        td_class.append("correct")
    else:
        td_class.append("wrong")
    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), boolean_str(pkg.is_status_ok('license'))))

    # License files
    td_class = ["centered"]
    if pkg.is_status_ok('license-files'):
        td_class.append("correct")
    else:
        td_class.append("wrong")
    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), boolean_str(pkg.is_status_ok('license-files'))))

    # Hash
    td_class = ["centered"]
    if pkg.is_status_ok('hash'):
        td_class.append("correct")
    else:
        td_class.append("wrong")
    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), boolean_str(pkg.is_status_ok('hash'))))

    # Current version
    if len(pkg.current_version) > 20:
        current_version = pkg.current_version[:20] + "..."
    else:
        current_version = pkg.current_version
    f.write(" <td class=\"centered\">%s</td>\n" % current_version)

    # Latest version
    if pkg.latest_version['status'] == RM_API_STATUS_ERROR:
        td_class.append("version-error")
    if pkg.latest_version['version'] is None:
        td_class.append("version-unknown")
    elif pkg.latest_version['version'] != pkg.current_version:
        td_class.append("version-needs-update")
    else:
        td_class.append("version-good")

    if pkg.latest_version['status'] == RM_API_STATUS_ERROR:
        latest_version_text = "<b>Error</b>"
    elif pkg.latest_version['status'] == RM_API_STATUS_NOT_FOUND:
        latest_version_text = "<b>Not found</b>"
    else:
        if pkg.latest_version['version'] is None:
            latest_version_text = "<b>Found, but no version</b>"
        else:
            latest_version_text = "<a href=\"https://release-monitoring.org/project/%s\"><b>%s</b></a>" % \
                (pkg.latest_version['id'], str(pkg.latest_version['version']))
        latest_version_text += "<br/>"
        if pkg.latest_version['status'] == RM_API_STATUS_FOUND_BY_DISTRO:
            latest_version_text += "found by <a href=\"https://release-monitoring.org/distro/Buildroot/\">distro</a>"
        else:
            latest_version_text += "found by guess"

    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), latest_version_text))

    # Warnings
    td_class = ["centered"]
    if pkg.warnings == 0:
        td_class.append("correct")
    else:
        td_class.append("wrong")
    f.write(" <td class=\"%s\">%d</td>\n" %
            (" ".join(td_class), pkg.warnings))

    # URL status
    td_class = ["centered"]
    url_str = pkg.status['url'][1]
    if pkg.status['url'][0] in ("error", "warning"):
        td_class.append("missing_url")
        if pkg.status['url'][0] == "error":
            td_class.append("invalid_url")
            url_str = "<a href=%s>%s</a>" % (pkg.url, pkg.status['url'][1])
    else:
        td_class.append("good_url")
        url_str = "<a href=%s>Link</a>" % pkg.url
    f.write(" <td class=\"%s\">%s</td>\n" %
            (" ".join(td_class), url_str))

    # CVEs
    td_class = ["centered"]
    if len(pkg.cves) == 0:
        td_class.append("correct")
    else:
        td_class.append("wrong")
    f.write(" <td class=\"%s\">\n" % " ".join(td_class))
    for cve in pkg.cves:
        f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve))
    f.write(" </td>\n")

    f.write(" </tr>\n")


def dump_html_all_pkgs(f, packages):
    f.write("""
<table class=\"sortable\">
 <tr>
  <td>Package</td>
  <td class=\"centered\">Patch count</td>
  <td class=\"centered\">Infrastructure</td>
  <td class=\"centered\">License</td>
  <td class=\"centered\">License files</td>
  <td class=\"centered\">Hash file</td>
  <td class=\"centered\">Current version</td>
  <td class=\"centered\">Latest version</td>
  <td class=\"centered\">Warnings</td>
  <td class=\"centered\">Upstream URL</td>
  <td class=\"centered\">CVEs</td>
 </tr>
""")
    for pkg in sorted(packages):
        dump_html_pkg(f, pkg)
    f.write("</table>")
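

# Emit the global statistics table: per-infrastructure package counts,
# license/hash coverage, release-monitoring.org results and CVE totals.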
def dump_html_stats(f, stats):
    f.write("<a id=\"results\"></a>\n")
    f.write("<table>\n")
    infras = [infra[6:] for infra in stats.keys() if infra.startswith("infra-")]
    for infra in infras:
        f.write(" <tr><td>Packages using the <i>%s</i> infrastructure</td><td>%s</td></tr>\n" %
                (infra, stats["infra-%s" % infra]))
    f.write(" <tr><td>Packages having license information</td><td>%s</td></tr>\n" %
            stats["license"])
    f.write(" <tr><td>Packages not having license information</td><td>%s</td></tr>\n" %
            stats["no-license"])
    f.write(" <tr><td>Packages having license files information</td><td>%s</td></tr>\n" %
            stats["license-files"])
    f.write(" <tr><td>Packages not having license files information</td><td>%s</td></tr>\n" %
            stats["no-license-files"])
    f.write(" <tr><td>Packages having a hash file</td><td>%s</td></tr>\n" %
            stats["hash"])
    f.write(" <tr><td>Packages not having a hash file</td><td>%s</td></tr>\n" %
            stats["no-hash"])
    f.write(" <tr><td>Total number of patches</td><td>%s</td></tr>\n" %
            stats["patches"])
    f.write("<tr><td>Packages having a mapping on <i>release-monitoring.org</i></td><td>%s</td></tr>\n" %
            stats["rmo-mapping"])
    f.write("<tr><td>Packages lacking a mapping on <i>release-monitoring.org</i></td><td>%s</td></tr>\n" %
            stats["rmo-no-mapping"])
    f.write("<tr><td>Packages that are up-to-date</td><td>%s</td></tr>\n" %
            stats["version-uptodate"])
    f.write("<tr><td>Packages that are not up-to-date</td><td>%s</td></tr>\n" %
            stats["version-not-uptodate"])
    f.write("<tr><td>Packages with no known upstream version</td><td>%s</td></tr>\n" %
            stats["version-unknown"])
    f.write("<tr><td>Packages affected by CVEs</td><td>%s</td></tr>\n" %
            stats["pkg-cves"])
    f.write("<tr><td>Total number of CVEs affecting all packages</td><td>%s</td></tr>\n" %
            stats["total-cves"])
    f.write("</table>\n")


def dump_html_gen_info(f, date, commit):
    # Updated on Mon Feb 19 08:12:08 CET 2018, Git commit aa77030b8f5e41f1c53eb1c1ad664b8c814ba032
    f.write("<p><i>Updated on %s, git commit %s</i></p>\n" % (str(date), commit))


def dump_html(packages, stats, date, commit, output):
    with open(output, 'w') as f:
        f.write(html_header)
        dump_html_all_pkgs(f, packages)
        dump_html_stats(f, stats)
        dump_html_gen_info(f, date, commit)
        f.write(html_footer)
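

# Serialize the full package, defconfig and statistics data to a JSON file,
# so that external tools can post-process the results.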
def dump_json(packages, defconfigs, stats, date, commit, output):
    # Format packages as a dictionary instead of a list
    # Exclude local fields that do not contain real data
    excluded_fields = ['url_worker', 'name']
    pkgs = {
        pkg.name: {
            k: v
            for k, v in pkg.__dict__.items()
            if k not in excluded_fields
        } for pkg in packages
    }
    defconfigs = {
        d.name: {
            k: v
            for k, v in d.__dict__.items()
        } for d in defconfigs
    }
    # Aggregate infrastructures into a single dict entry
    statistics = {
        k: v
        for k, v in stats.items()
        if not k.startswith('infra-')
    }
    statistics['infra'] = {k[6:]: v for k, v in stats.items() if k.startswith('infra-')}
    # The actual structure to dump, add commit and date to it
    final = {'packages': pkgs,
             'stats': statistics,
             'defconfigs': defconfigs,
             'package_status_checks': Package.status_checks,
             'commit': commit,
             'date': str(date)}
    with open(output, 'w') as f:
        json.dump(final, f, indent=2, separators=(',', ': '))
        f.write('\n')


def resolvepath(path):
    return os.path.abspath(os.path.expanduser(path))


def parse_args():
    parser = argparse.ArgumentParser()
    output = parser.add_argument_group('output', 'Output file(s)')
    output.add_argument('--html', dest='html', type=resolvepath,
                        help='HTML output file')
    output.add_argument('--json', dest='json', type=resolvepath,
                        help='JSON output file')
    packages = parser.add_mutually_exclusive_group()
    packages.add_argument('-n', dest='npackages', type=int, action='store',
                          help='Number of packages')
    packages.add_argument('-p', dest='packages', action='store',
                          help='List of packages (comma separated)')
    parser.add_argument('--nvd-path', dest='nvd_path',
                        help='Path to the local NVD database', type=resolvepath)
    args = parser.parse_args()
    if not args.html and not args.json:
        parser.error('at least one of --html or --json (or both) is required')
    return args


def __main__():
    args = parse_args()
    if args.packages:
        package_list = args.packages.split(",")
    else:
        package_list = None
    date = datetime.datetime.utcnow()
    commit = subprocess.check_output(['git', 'rev-parse',
                                      'HEAD']).splitlines()[0].decode()
    print("Build package list ...")
    packages = get_pkglist(args.npackages, package_list)
    print("Getting developers ...")
    developers = parse_developers()
    print("Build defconfig list ...")
    defconfigs = get_defconfig_list()
    for d in defconfigs:
        d.set_developers(developers)
    print("Getting package make info ...")
    package_init_make_info()
    print("Getting package details ...")
    for pkg in packages:
        pkg.set_infra()
        pkg.set_license()
        pkg.set_hash_info()
        pkg.set_patch_count()
        pkg.set_check_package_warnings()
        pkg.set_current_version()
        pkg.set_url()
        pkg.set_developers(developers)
    print("Checking URL status")
    check_package_urls(packages)
    print("Getting latest versions ...")
    check_package_latest_version(packages)
    if args.nvd_path:
        print("Checking packages CVEs")
        check_package_cves(args.nvd_path, {p.name: p for p in packages})
    print("Calculate stats")
    stats = calculate_stats(packages)
    if args.html:
        print("Write HTML")
        dump_html(packages, stats, date, commit, args.html)
    if args.json:
        print("Write JSON")
        dump_json(packages, defconfigs, stats, date, commit, args.json)


__main__()