Rewrite lint.sh as check_lint.py (#3161)
* Rewrite lint.sh as check_lint.py

This makes several beneficial changes:

  * Runs linters in parallel: linting everything now takes 5.5 seconds,
    down from 25.
  * Supports linting specific files, passed on the command-line.
  * Defaults to linting files changed since master; pass --all to lint
    everything (example invocations below).
  * Infers header language based on related files.
  * Lays the groundwork for linting more than just C++.
  * Adds support for linting python.

* Fix lint errors

* Use check_lint.py in check.sh

* Remove lint.sh

* Python lint configuration

* Enable python linting in travis
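
For reference, the argument parser in the new check_lint.py (reproduced in full below) supports invocations along these lines. This usage sketch is inferred from the diff rather than quoted from the commit; the revision and file arguments are arbitrary examples:

  # Lint files changed since origin/master (the default).
  scripts/check_lint.py

  # Lint every known C++, Objective-C, and Python source.
  scripts/check_lint.py --all

  # Show the lint commands that would run, without running them.
  scripts/check_lint.py --dry-run

  # Lint files changed since a specific revision, or just the named files.
  scripts/check_lint.py origin/master
  scripts/check_lint.py scripts/binary_to_array.py

check.sh continues to drive the default path, passing "${START_SHA}" to check_lint.py as shown in its diff below.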
wilhuff authored Jun 14, 2019
1 parent 64d8a40 commit 3e3bcb4
Showing 15 changed files with 923 additions and 117 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -79,6 +79,9 @@ Ninja
/cmake-build-debug
/cmake-build-release

# Python
*.pyc

# Visual Studio
/.vs

1 change: 1 addition & 0 deletions .travis.yml
@@ -16,6 +16,7 @@ jobs:
before_install:
- brew install clang-format
- brew install swiftformat
- pip install flake8
script:
- ./scripts/check.sh --test-only

16 changes: 8 additions & 8 deletions Firestore/Example/Benchmarks/FSTLevelDBBenchmarkTests.mm
@@ -16,9 +16,8 @@

#import <Foundation/Foundation.h>
#import <XCTest/XCTest.h>
#include <cstdint>

#include "benchmark/benchmark.h"
#include <cstdint>

#import "Firestore/Source/Local/FSTLevelDB.h"
#import "Firestore/Source/Local/FSTLocalSerializer.h"
@@ -29,6 +28,7 @@
#include "Firestore/core/src/firebase/firestore/model/document_key.h"
#include "Firestore/core/src/firebase/firestore/model/types.h"
#include "Firestore/core/src/firebase/firestore/util/string_format.h"
#include "benchmark/benchmark.h"

NS_ASSUME_NONNULL_BEGIN

@@ -104,19 +104,19 @@ void FillDB() {
auto docKey = DocumentKey::FromPathString(StringFormat("docs/doc_%i", i));
std::string docKeyString = LevelDbRemoteDocumentKey::Key(docKey);
txn.Put(docKeyString, DocumentData());
WriteIndex(txn, docKey);
WriteIndex(&txn, docKey);
}
txn.Commit();
// Force a write to disk to simulate startup situation
db_.ptr->CompactRange(NULL, NULL);
}

protected:
void WriteIndex(LevelDbTransaction &txn, const DocumentKey &docKey) {
void WriteIndex(LevelDbTransaction *txn, const DocumentKey &docKey) {
// Arbitrary target ID
TargetId targetID = 1;
txn.Put(LevelDbDocumentTargetKey::Key(docKey, targetID), emptyBuffer_);
txn.Put(LevelDbTargetDocumentKey::Key(targetID, docKey), emptyBuffer_);
txn->Put(LevelDbDocumentTargetKey::Key(docKey, targetID), emptyBuffer_);
txn->Put(LevelDbTargetDocumentKey::Key(targetID, docKey), emptyBuffer_);
}

FSTLevelDB *db_;
@@ -128,7 +128,7 @@ void WriteIndex(LevelDbTransaction &txn, const DocumentKey &docKey) {
// Write a couple large values (documents)
// In each test, either overwrite index entries and documents, or just documents

BENCHMARK_DEFINE_F(LevelDBFixture, RemoteEvent)(benchmark::State &state) {
BENCHMARK_DEFINE_F(LevelDBFixture, RemoteEvent)(benchmark::State &state) { // NOLINT
bool writeIndexes = static_cast<bool>(state.range(0));
int64_t documentSize = state.range(1);
int64_t docsToUpdate = state.range(2);
@@ -137,7 +137,7 @@ void WriteIndex(LevelDbTransaction &txn, const DocumentKey &docKey) {
LevelDbTransaction txn(db_.ptr, "benchmark");
for (int i = 0; i < docsToUpdate; i++) {
auto docKey = DocumentKey::FromPathString(StringFormat("docs/doc_%i", i));
if (writeIndexes) WriteIndex(txn, docKey);
if (writeIndexes) WriteIndex(&txn, docKey);
std::string docKeyString = LevelDbRemoteDocumentKey::Key(docKey);
txn.Put(docKeyString, documentUpdate);
}
16 changes: 10 additions & 6 deletions scripts/binary_to_array.py
@@ -50,19 +50,22 @@

arg_parser = argparse.ArgumentParser()

arg_parser.add_argument("input", help="Input file containing binary data to embed")
arg_parser.add_argument("input",
help="Input file containing binary data to embed")
arg_parser.add_argument("--output_source",
help="Output source file, defining the array data.")
help="Output source file, defining the array data.")
arg_parser.add_argument("--output_header",
help="Output header file, declaring the array data.")
help="Output header file, declaring the array data.")
arg_parser.add_argument("--array", help="Identifier for the array.")
arg_parser.add_argument("--array_size", help="Identifier for the array size.")
arg_parser.add_argument("--filename", help="Override file name in code.")
arg_parser.add_argument("--filename_identifier", help="Where to put the filename.")
arg_parser.add_argument("--filename_identifier",
help="Where to put the filename.")
arg_parser.add_argument("--header_guard",
help="Header guard to #define in the output header.")
help="Header guard to #define in the output header.")
arg_parser.add_argument("--cpp_namespace",
help="C++ namespace to use. If blank, will generate a C array.")
help="C++ namespace to use. "
"If blank, will generate a C array.")

# How many hex bytes to display in a line. Each "0x00, " takes 6 characters, so
# a width of 12 lets us fit within 80 characters.
@@ -278,5 +281,6 @@ def main():
src.write(source_text)
logging.debug("Wrote source file %s", output_source)


if __name__ == "__main__":
main()
2 changes: 1 addition & 1 deletion scripts/check.sh
@@ -261,4 +261,4 @@ fi
"${top_dir}/scripts/check_test_inclusion.py"

# Google C++ style
"${top_dir}/scripts/lint.sh" "${START_SHA}"
"${top_dir}/scripts/check_lint.py" "${START_SHA}"
252 changes: 252 additions & 0 deletions scripts/check_lint.py
@@ -0,0 +1,252 @@
#!/usr/bin/env python

# Copyright 2019 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Lints source files for conformance with the style guide that applies.
Currently supports linting Objective-C, Objective-C++, C++, and Python source.
"""

import argparse
import logging
import os
import re
import subprocess
import sys
import textwrap

from lib import checker
from lib import command_trace
from lib import git
from lib import source

_logger = logging.getLogger('lint')


_dry_run = False


_CPPLINT_OBJC_FILTERS = [
# Objective-C uses #import and does not use header guards
'-build/header_guard',

# Inline definitions of Objective-C blocks confuse cpplint
'-readability/braces',

# C-style casts are acceptable in Objective-C++
'-readability/casting',

# Objective-C needs to use type 'long' for interop between types like NSInteger
# and printf-style functions.
'-runtime/int',

# cpplint is generally confused by Objective-C mixing with C++.
# * Objective-C method invocations in a for loop make it think it's a
# range-for
# * Objective-C dictionary literals confuse brace spacing
# * Empty category declarations ("@interface Foo ()") look like function
# invocations
'-whitespace',
]

_CPPLINT_OBJC_OPTIONS = [
# cpplint normally excludes Objective-C++
'--extensions=h,m,mm',

# Objective-C style allows longer lines
'--linelength=100',

'--filter=' + ','.join(_CPPLINT_OBJC_FILTERS),
]


def main():
global _dry_run

parser = argparse.ArgumentParser(description='Lint source files.')
parser.add_argument('--dry-run', '-n', action='store_true',
help='Show what the linter would do without doing it')
parser.add_argument('--all', action='store_true',
help='run the linter over all known sources')
parser.add_argument('rev_or_files', nargs='*',
help='A single revision that specifies a point in time '
'from which to look for changes. Defaults to '
'origin/master. Alternatively, a list of specific '
'files or git pathspecs to lint.')
args = command_trace.parse_args(parser)

if args.dry_run:
_dry_run = True
command_trace.enable_tracing()

pool = checker.Pool()

sources = _unique(source.CC_DIRS + source.OBJC_DIRS + source.PYTHON_DIRS)
patterns = git.make_patterns(sources)

files = git.find_changed_or_files(args.all, args.rev_or_files, patterns)
check(pool, files)

pool.exit()


def check(pool, files):
group = source.categorize_files(files)

for kind, files in group.kinds.items():
for chunk in checker.shard(files):
if not chunk:
continue

linter = _linters[kind]
pool.submit(linter, chunk)


def lint_cc(files):
return _run_cpplint([], files)


def lint_objc(files):
return _run_cpplint(_CPPLINT_OBJC_OPTIONS, files)


def _run_cpplint(options, files):
scripts_dir = os.path.dirname(os.path.abspath(__file__))
cpplint = os.path.join(scripts_dir, 'cpplint.py')

command = [sys.executable, cpplint, '--quiet']
command.extend(options)
command.extend(files)

return _read_output(command)


_flake8_warned = False


def lint_py(files):
flake8 = which('flake8')
if flake8 is None:
global _flake8_warned
if not _flake8_warned:
_flake8_warned = True
_logger.warn(textwrap.dedent(
"""
Could not find flake8 on the path; skipping python lint.
Install with:
pip install --user flake8
"""))
return

command = [flake8]
command.extend(files)

return _read_output(command)


def _read_output(command):
command_trace.log(command)

if _dry_run:
return checker.Result(0, '')

proc = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = proc.communicate('')[0]
sc = proc.wait()

return checker.Result(sc, output)


_linters = {
'cc': lint_cc,
'objc': lint_objc,
'py': lint_py,
}


def _unique(items):
return list(set(items))


def make_path():
"""Makes a list of paths to search for binaries.
Returns:
A list of directories that can be sources of binaries to run. This includes
both the PATH environment variable and any bin directories associated with
python install locations.
"""
# Start with the system-supplied PATH.
path = os.environ['PATH'].split(os.pathsep)

# In addition, add any bin directories near the lib directories in the python
# path. This makes it possible to find flake8 in ~/Library/Python/2.7/bin
# after pip install --user flake8. Also handle installations on Windows which
# go in %APPDATA%/Python/Scripts.
lib_pattern = re.compile(r'(.*)/[^/]*/site-packages')
for entry in sys.path:
entry = entry.replace(os.sep, '/')
m = lib_pattern.match(entry)
if not m:
continue

python_root = m.group(1).replace('/', os.sep)

for bin_basename in ('bin', 'Scripts'):
bin_dir = os.path.join(python_root, bin_basename)
if bin_dir not in path and os.path.exists(bin_dir):
path.append(bin_dir)

return path


_PATH = make_path()


def which(executable):
"""Finds the executable with the given name.
Returns:
The fully qualified path to the executable or None if the executable isn't
found.
"""
if executable.startswith('/'):
return executable

for executable_with_ext in _executable_names(executable):
for entry in _PATH:
joined = os.path.join(entry, executable_with_ext)
if os.path.isfile(joined) and os.access(joined, os.X_OK):
return joined

return None


def _executable_names(executable):
"""Yields a sequence of all possible executable names."""

if os.name == 'nt':
pathext = os.environ.get('PATHEXT', '').split(os.pathsep)
for ext in pathext:
yield executable + ext

else:
yield executable


if __name__ == '__main__':
main()
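
The lib helpers imported at the top of check_lint.py (checker, command_trace, git, and source) are among the 15 changed files, but their diffs are not reproduced here. Purely for orientation, below is a minimal sketch of the checker interface the script relies on, inferred from the call sites above (checker.Pool, pool.submit, pool.exit, checker.shard, checker.Result); the committed scripts/lib/checker.py may use different names and internals.

# Assumption-based sketch, NOT the committed scripts/lib/checker.py.
import collections
import multiprocessing.pool
import sys

# Bundles a lint invocation's exit status with its captured output.
Result = collections.namedtuple('Result', ['status', 'output'])


def shard(files, num_shards=4):
  """Splits a file list into roughly equal chunks for parallel linting."""
  return [files[i::num_shards] for i in range(num_shards)]


class Pool(object):
  """Runs submitted lint jobs concurrently and reports a combined status."""

  def __init__(self):
    self._pool = multiprocessing.pool.ThreadPool()
    self._async_results = []

  def submit(self, func, *args):
    self._async_results.append(self._pool.apply_async(func, args))

  def exit(self):
    """Waits for all jobs, prints their output, and exits non-zero on failure."""
    self._pool.close()
    self._pool.join()
    status = 0
    for async_result in self._async_results:
      result = async_result.get()
      if result is None:  # e.g. lint_py returns None when flake8 is missing
        continue
      if result.output:
        print(result.output)
      status = status or result.status
    sys.exit(status)

A thread pool suffices in a sketch like this because the heavy work happens in the cpplint and flake8 subprocesses, so Python's GIL is not the bottleneck.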
2 changes: 1 addition & 1 deletion scripts/check_test_inclusion.py
@@ -80,7 +80,7 @@ def CheckProject(project_file, test_files):
"""

# A dict of basename to filename
basenames = {os.path.basename(f) : f for f in test_files}
basenames = {os.path.basename(f): f for f in test_files}

file_list_pattern = re.compile(r"/\* (\S+) in Sources \*/")
with open(project_file, "r") as fd:
13 changes: 13 additions & 0 deletions scripts/lib/__init__.py
@@ -0,0 +1,13 @@
# Copyright 2019 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.