Reland of Add helpful error messages to check_gn_headers.py (patchset #1 id:1 of https://codereview.chromium.org/2905863002/ )
Reason for revert:
The real culprit was https://crrev.com/2903733004/, which has already been reverted.
This CL (https://crrev.com/2891363004) only made the check fail more gracefully and should be kept.
Original issue's description:
> Revert of Add helpful error messages to check_gn_headers.py (patchset #3 id:40001 of https://codereview.chromium.org/2891363004/ )
>
> Reason for revert:
> This CL is likely causing massive failures:
>
> https://build.chromium.org/p/chromium.linux/builders/Linux%20Builder/builds/84473
> to
> https://build.chromium.org/p/chromium.linux/builders/Linux%20Builder/builds/84498
>
> and
>
> https://build.chromium.org/p/chromium.linux/builders/Linux%20Builder%20%28dbg%29/builds/111528
> to
> https://build.chromium.org/p/chromium.linux/builders/Linux%20Builder%20%28dbg%29/builds/111542
>
> Original issue's description:
> > Add helpful error messages to check_gn_headers.py
> >
> > Before running check_gn_headers.py, it is necessary to build all
> > targets in order to get the dependency info from the compiler.
> > Directly checking whether a rebuild is needed by doing a build dry
> > run takes too long, so some heuristics are used instead.
> >
> > BUG=661774
> >
> > Review-Url: https://codereview.chromium.org/2891363004
> > Cr-Commit-Position: refs/heads/master@{#474588}
> > Committed: https://chromium.googlesource.com/chromium/src/+/0ceaba5ef538ce1780578d5e948641bc27f5829b
>
> [email protected],[email protected],[email protected]
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=661774
>
> Review-Url: https://codereview.chromium.org/2905863002
> Cr-Commit-Position: refs/heads/master@{#474649}
> Committed: https://chromium.googlesource.com/chromium/src/+/d3c2681426db53550547f2bc115c3f72ba40f276
[email protected],[email protected],[email protected]
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=661774
Review-Url: https://codereview.chromium.org/2907683002
Cr-Commit-Position: refs/heads/master@{#474786}
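For reference, the workflow the new error messages assume is: build all targets in OUT_DIR first, so the compiler-generated dependency info exists, then run the checker. A minimal sketch of that workflow follows; the out directory and the JSON filename are illustrative, not taken from this CL.

# Hypothetical driver illustrating the intended workflow; paths are examples.
import subprocess

out_dir = 'out/Release'  # assumed build directory
# Build everything first; the script parses the resulting ninja deps info.
subprocess.check_call(['ninja', '-C', out_dir])
# Now compare the ninja deps against the headers GN knows about.
subprocess.check_call(['python', 'build/check_gn_headers.py',
                       '--out-dir', out_dir,
                       '--json', 'missing_headers.json'])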
diff --git a/build/check_gn_headers.py b/build/check_gn_headers.py
index 0850196e..e6f2b1e 100755
--- a/build/check_gn_headers.py
+++ b/build/check_gn_headers.py
@@ -13,8 +13,10 @@
import json
import os
import re
+import shutil
import subprocess
import sys
+import tempfile
from multiprocessing import Process, Queue
@@ -66,12 +68,22 @@
def GetHeadersFromGN(out_dir, q):
"""Return all the header files from GN"""
- subprocess.check_call(['gn', 'gen', out_dir, '--ide=json', '-q'])
- gn_json = json.load(open(os.path.join(out_dir, 'project.json')))
- q.put(ParseGNProjectJSON(gn_json))
+
+ tmp = None
+ try:
+ tmp = tempfile.mkdtemp()
+ shutil.copy2(os.path.join(out_dir, 'args.gn'),
+ os.path.join(tmp, 'args.gn'))
+ # Do "gn gen" in a temp dir to prevent dirtying |out_dir|.
+ subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q'])
+ gn_json = json.load(open(os.path.join(tmp, 'project.json')))
+ finally:
+ if tmp:
+ shutil.rmtree(tmp)
+ q.put(ParseGNProjectJSON(gn_json, out_dir, tmp))
-def ParseGNProjectJSON(gn):
+def ParseGNProjectJSON(gn, out_dir, tmp_out):
"""Parse GN output and get the header files"""
all_headers = set()
@@ -85,6 +97,8 @@
if f.endswith('.h') or f.endswith('.hh'):
if f.startswith('//'):
f = f[2:] # Strip the '//' prefix.
+ if f.startswith(tmp_out):
+ f = out_dir + f[len(tmp_out):]
all_headers.add(f)
return all_headers
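Read outside the diff, the technique in these hunks is: run "gn gen" in a scratch directory so |out_dir| is never dirtied, then map header paths that GN reports under the scratch directory back onto |out_dir| so they compare equal to the paths seen in ninja deps. A standalone sketch of that idea, assuming the project.json layout the script already parses (header paths listed under each target's sources), not the patched file itself:

# Sketch only; helper name and structure are illustrative.
import json
import os
import shutil
import subprocess
import tempfile

def headers_from_gn(out_dir):
  tmp = tempfile.mkdtemp()
  try:
    # Reuse the real build arguments so GN sees the same configuration.
    shutil.copy2(os.path.join(out_dir, 'args.gn'),
                 os.path.join(tmp, 'args.gn'))
    # "gn gen" in the scratch dir keeps |out_dir| untouched.
    subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q'])
    with open(os.path.join(tmp, 'project.json')) as f:
      gn_json = json.load(f)
  finally:
    shutil.rmtree(tmp)

  headers = set()
  for properties in gn_json['targets'].values():
    for f in properties.get('sources', []):
      if f.endswith('.h') or f.endswith('.hh'):
        if f.startswith('//'):
          f = f[2:]  # Strip the GN source-root prefix.
        if f.startswith(tmp):
          # Generated headers point into the scratch dir; rewrite the prefix
          # so they match paths rooted in the real build directory.
          f = out_dir + f[len(tmp):]
        headers.add(f)
  return headers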
@@ -125,14 +139,20 @@
def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--out-dir', default='out/Release')
- parser.add_argument('--json')
- parser.add_argument('--whitelist')
- parser.add_argument('args', nargs=argparse.REMAINDER)
+ parser = argparse.ArgumentParser(description='''
+ NOTE: Use ninja to build all targets in OUT_DIR before running
+ this script.''')
+ parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release',
+ help='output directory of the build')
+ parser.add_argument('--json',
+ help='JSON output filename for missing headers')
+ parser.add_argument('--whitelist', help='file containing whitelist')
args, _extras = parser.parse_known_args()
+ if not os.path.isdir(args.out_dir):
+ parser.error('OUT_DIR "%s" does not exist.' % args.out_dir)
+
d_q = Queue()
d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,))
d_p.start()
@@ -146,8 +166,6 @@
deps_p.start()
d = d_q.get()
- assert len(GetNonExistingFiles(d)) == 0, \
- 'Found non-existing files in ninja deps'
gn = gn_q.get()
missing = d - gn
nonexisting = GetNonExistingFiles(gn)
@@ -160,6 +178,14 @@
gn_p.join()
deps_p.join()
+ if len(GetNonExistingFiles(d)) > 0:
+ parser.error('''Found non-existing files in ninja deps. You should
+ build all in OUT_DIR.''')
+ if len(d) == 0:
+ parser.error('OUT_DIR looks empty. You should build all there.')
+ if any((('/gen/' in i) for i in nonexisting)):
+ parser.error('OUT_DIR looks wrong. You should build all there.')
+
if args.whitelist:
whitelist = ParseWhiteList(open(args.whitelist).read())
missing -= whitelist
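The heuristics mentioned in the original description are the three cheap checks added at the end of main(). As a standalone sketch of that logic (GetNonExistingFiles is assumed to simply filter for paths missing on disk, and |error| stands in for parser.error):

# Sketch of the "did you really build all in OUT_DIR?" heuristics.
import os

def non_existing_files(paths):
  # Assumed behavior of GetNonExistingFiles in the script.
  return {f for f in paths if not os.path.isfile(f)}

def check_out_dir_was_built(ninja_headers, gn_nonexisting, error):
  # Deps referencing files that are not on disk usually mean a stale build.
  if non_existing_files(ninja_headers):
    error('Found non-existing files in ninja deps. You should build all '
          'in OUT_DIR.')
  # No deps at all means nothing was built in OUT_DIR.
  if not ninja_headers:
    error('OUT_DIR looks empty. You should build all there.')
  # Missing generated headers suggest the wrong or incomplete build dir.
  if any('/gen/' in f for f in gn_nonexisting):
    error('OUT_DIR looks wrong. You should build all there.')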