Upgrade oss-fuzz to 1a87da68c870ce09ab6ffac30589da12e7ff6a8d am: db2798894a am: 3270d2fe0c am: 7a05c05687

Change-Id: I67ec585410524014d706f0dafc60aaaa7de56784
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..00878f7
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,585 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=print-statement,
+        parameter-unpacking,
+        unpacking-in-except,
+        old-raise-syntax,
+        backtick,
+        long-suffix,
+        old-ne-operator,
+        old-octal-literal,
+        import-star-module-level,
+        non-ascii-bytes-literal,
+        raw-checker-failed,
+        bad-inline-option,
+        locally-disabled,
+        file-ignored,
+        suppressed-message,
+        useless-suppression,
+        deprecated-pragma,
+        use-symbolic-message-instead,
+        apply-builtin,
+        basestring-builtin,
+        buffer-builtin,
+        cmp-builtin,
+        coerce-builtin,
+        execfile-builtin,
+        file-builtin,
+        long-builtin,
+        raw_input-builtin,
+        reduce-builtin,
+        standarderror-builtin,
+        unicode-builtin,
+        xrange-builtin,
+        coerce-method,
+        delslice-method,
+        getslice-method,
+        setslice-method,
+        no-absolute-import,
+        old-division,
+        dict-iter-method,
+        dict-view-method,
+        next-method-called,
+        metaclass-assignment,
+        indexing-exception,
+        raising-string,
+        reload-builtin,
+        oct-method,
+        hex-method,
+        nonzero-method,
+        cmp-method,
+        input-builtin,
+        round-builtin,
+        intern-builtin,
+        unichr-builtin,
+        map-builtin-not-iterating,
+        zip-builtin-not-iterating,
+        range-builtin-not-iterating,
+        filter-builtin-not-iterating,
+        using-cmp-argument,
+        eq-without-hash,
+        div-method,
+        idiv-method,
+        rdiv-method,
+        exception-message-attribute,
+        invalid-str-codec,
+        sys-max-int,
+        bad-python3-import,
+        deprecated-string-function,
+        deprecated-str-translate-call,
+        deprecated-itertools-function,
+        deprecated-types-field,
+        next-method-defined,
+        dict-items-not-iterating,
+        dict-keys-not-iterating,
+        dict-values-not-iterating,
+        deprecated-operator-function,
+        deprecated-urllib-function,
+        xreadlines-attribute,
+        deprecated-sys-function,
+        exception-escape,
+        comprehension-escape,
+        fixme
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'error', 'warning', 'refactor', and 'convention'
+# which contain the number of messages in each category, as well as 'statement'
+# which is the total number of statements analyzed. This score is used by the
+# global evaluation report (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+          bar,
+          baz,
+          toto,
+          tutu,
+          tata
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style.
+#class-attribute-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+           j,
+           k,
+           ex,
+           Run,
+           _
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style.
+#variable-rgx=
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='  '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1  : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,
+               dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+      XXX,
+      TODO
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+          _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[LOGGING]
+
+# Format style used to check logging format string. `old` means using %
+# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[STRING]
+
+# This flag controls whether the implicit-str-concat-in-sequence should
+# generate a warning on implicit string concatenation in sequences defined over
+# several lines.
+check-str-concat-over-line-jumps=no
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=optparse,tkinter.tix
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled).
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled).
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+                      __new__,
+                      setUp,
+                      __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+                  _fields,
+                  _replace,
+                  _source,
+                  _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=BaseException,
+                       Exception
+
+# Maximum number of characters on a single line.
+max-line-length=80
\ No newline at end of file
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 0000000..f1eceb6
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,5 @@
+[style]
+based_on_style = google
+column_limit = 80
+indent_width = 2
+split_before_named_assigns = true
diff --git a/.travis.yml b/.travis.yml
index d30563d..2064c60 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -12,6 +12,10 @@
 
 matrix:
   include:
+    - name: "presubmit"
+      install:
+        - pip install -r infra/dev-requirements.txt
+      script: ./infra/presubmit.py
     - name: "libfuzzer address x86_64"
       env:
         - TRAVIS_ENGINE=libfuzzer
@@ -49,3 +53,6 @@
         - TRAVIS_ARCHITECTURE=x86_64
 
 script: ./infra/travis/travis_build.py
+
+notifications:
+  webhooks: https://www.travisbuddy.com/
diff --git a/METADATA b/METADATA
index 35c3966..f8659b3 100644
--- a/METADATA
+++ b/METADATA
@@ -9,11 +9,11 @@
     type: GIT
     value: "https://github.com/google/oss-fuzz.git"
   }
-  version: "65956add1114508bb8b482b6abf9ad2240130c24"
+  version: "1a87da68c870ce09ab6ffac30589da12e7ff6a8d"
   license_type: NOTICE
   last_upgrade_date {
-    year: 2019
-    month: 12
-    day: 26
+    year: 2020
+    month: 2
+    day: 1
   }
 }
diff --git a/README.md b/README.md
index bc9af6e..ab300c3 100644
--- a/README.md
+++ b/README.md
@@ -27,10 +27,10 @@
 Read our [detailed documentation](https://google.github.io/oss-fuzz) to learn how to use OSS-Fuzz.
 
 ## Trophies
-As of August 2019, OSS-Fuzz has found over [14,000] bugs in [200] open source projects.
+As of January 2020, OSS-Fuzz has found over [16,000] bugs in [250] open source projects.
 
-[14,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?can=1&q=-status%3AWontFix%2CDuplicate+-Infra
-[200]: https://github.com/google/oss-fuzz/tree/master/projects
+[16,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?q=-status%3AWontFix%2CDuplicate%20-component%3AInfra&can=1
+[250]: https://github.com/google/oss-fuzz/tree/master/projects
 
 ## Blog posts
 
diff --git a/docs/advanced-topics/reproducing.md b/docs/advanced-topics/reproducing.md
index e3f7f70..7e8ea6d 100644
--- a/docs/advanced-topics/reproducing.md
+++ b/docs/advanced-topics/reproducing.md
@@ -39,7 +39,7 @@
 ```
 
 For timeout bugs, add the `-timeout=25` argument. For OOM bugs, add the
-`-rss_limit_mb=2048` argument. Read more on [how timeouts and OOMs are
+`-rss_limit_mb=2560` argument. Read more on [how timeouts and OOMs are
 handled]({{ site.baseurl }}/faq/#how-do-you-handle-timeouts-and-ooms).
 
 Depending on the nature of the bug, the fuzz target binary needs to be built
@@ -145,4 +145,4 @@
 
 If you need to reproduce a `coverage` build failure, follow the
 [Code Coverage page]({{ site.baseurl }}/advanced-topics/code-coverage) to build
-your project and generate a code coverage report.
\ No newline at end of file
+your project and generate a code coverage report.
diff --git a/docs/faq.md b/docs/faq.md
index 872cb77..ca02b9b 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -12,6 +12,15 @@
 {:toc}
 ---
 
+## Where can I learn more about fuzzing?
+
+We recommend reading [libFuzzer tutorial] and the other docs in [google/fuzzing]
+repository. These and some other resources are listed on the
+[useful links]({{ site.baseurl }}/reference/useful-links/#tutorials) page.
+
+[google/fuzzing]: https://github.com/google/fuzzing/tree/master/docs
+[libFuzzer tutorial]: https://github.com/google/fuzzing/blob/master/tutorial/libFuzzerTutorial.md
+
 ## What kind of projects are you accepting?
 
 We accept established projects that have a critical impact on infrastructure and
@@ -73,7 +82,7 @@
 ## How do you handle timeouts and OOMs?
 
 If a single input to a [fuzz target]({{ site.baseurl }}/reference/glossary/#fuzz-target)
-requires more than **~25 seconds** or more than **2GB RAM** to process, we
+requires more than **~25 seconds** or more than **2.5GB RAM** to process, we
 report this as a timeout or an OOM (out-of-memory) bug
 (examples: [timeouts](https://bugs.chromium.org/p/oss-fuzz/issues/list?can=1&q=%22Crash+Type%3A+Timeout%22),
 [OOMs](https://bugs.chromium.org/p/oss-fuzz/issues/list?can=1&q="Crash+Type%3A+Out-of-memory")).
@@ -173,4 +182,4 @@
 OSS-Fuzz builders have 32CPU/28.8GB RAM.
 
 Fuzzing machines only have a single core and fuzz targets should not use more
-than 2GB of RAM.
+than 2.5GB of RAM.
diff --git a/docs/getting-started/new-project-guide/go_lang.md b/docs/getting-started/new-project-guide/go_lang.md
index 800485c..939bae7 100644
--- a/docs/getting-started/new-project-guide/go_lang.md
+++ b/docs/getting-started/new-project-guide/go_lang.md
@@ -30,12 +30,19 @@
 ## Project files
 
 The structure of the project directory in OSS-Fuzz repository doesn't differ for
-projects written in Go. The project files have the following Go specific aspects.
+projects written in Go. The project files have the following Go specific
+aspects.
 
 ### project.yaml
 
-For projects written in Go, we use only `libfuzzer` fuzzing engine and `address`
-sanitizer.
+The `language` attribute must be specified.
+
+```yaml
+language: go
+```
+
+The only supported fuzzing engine and sanitizer are `libfuzzer` and `address`,
+respectively.
 [Example](https://github.com/google/oss-fuzz/blob/356f2b947670b7eb33a1f535c71bc5c87a60b0d1/projects/syzkaller/project.yaml#L7):
 
 ```yaml
diff --git a/docs/index.md b/docs/index.md
index 76f8f3e..66d9d9b 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -32,8 +32,18 @@
 Currently, OSS-Fuzz supports C/C++, Rust, and Go code. Other languages supported by [LLVM](http://llvm.org) may work too.
 OSS-Fuzz supports fuzzing x86_64 and i386 builds.
 
-## Trophies
-As of August 2019, OSS-Fuzz has found over [14,000] bugs in [200] open source projects.
+## Learn more about fuzzing
 
-[14,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?can=1&q=-status%3AWontFix%2CDuplicate+-Infra
-[200]: https://github.com/google/oss-fuzz/tree/master/projects
+This documentation describes how to use OSS-Fuzz service for your open source project.
+To learn more about fuzzing in general, we recommend reading [libFuzzer tutorial]
+and the other docs in [google/fuzzing] repository. These and some other resources
+are listed on the [useful links]({{ site.baseurl }}/reference/useful-links/#tutorials) page.
+
+[google/fuzzing]: https://github.com/google/fuzzing/tree/master/docs
+[libFuzzer tutorial]: https://github.com/google/fuzzing/blob/master/tutorial/libFuzzerTutorial.md
+
+## Trophies
+As of January 2020, OSS-Fuzz has found over [16,000] bugs in [250] open source projects.
+
+[16,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?q=-status%3AWontFix%2CDuplicate%20-component%3AInfra&can=1
+[250]: https://github.com/google/oss-fuzz/tree/master/projects
diff --git a/infra/base-images/base-builder/Dockerfile b/infra/base-images/base-builder/Dockerfile
index 702f105..0e22baf 100644
--- a/infra/base-images/base-builder/Dockerfile
+++ b/infra/base-images/base-builder/Dockerfile
@@ -68,7 +68,7 @@
 ENV COVERAGE_FLAGS_coverage "-fprofile-instr-generate -fcoverage-mapping -pthread -Wl,--no-as-needed -Wl,-ldl -Wl,-lm -Wno-unused-command-line-argument"
 
 # Coverage isntrumentation flags for dataflow builds.
-ENV COVERAGE_FLAGS_dataflow="-fsanitize-coverage=trace-pc-guard,pc-table,func,trace-cmp"
+ENV COVERAGE_FLAGS_dataflow="-fsanitize-coverage=trace-pc-guard,pc-table,bb,trace-cmp"
 
 # Default sanitizer, fuzzing engine and architecture to use.
 ENV SANITIZER="address"
@@ -104,6 +104,8 @@
 COPY compile compile_afl compile_dataflow compile_libfuzzer compile_honggfuzz \
     precompile_honggfuzz srcmap write_labels.py /usr/local/bin/
 
+COPY detect_repo.py /src
+
 RUN precompile_honggfuzz
 
 CMD ["compile"]
diff --git a/infra/base-images/base-builder/compile_dataflow b/infra/base-images/base-builder/compile_dataflow
index bfdd766..bf0a425 100755
--- a/infra/base-images/base-builder/compile_dataflow
+++ b/infra/base-images/base-builder/compile_dataflow
@@ -15,13 +15,18 @@
 #
 ################################################################################
 
+export LIB_FUZZING_ENGINE="/usr/lib/DataFlow*.o"
 echo -n "Compiling DataFlow to $LIB_FUZZING_ENGINE... "
 mkdir -p $WORK/libfuzzer
 pushd $WORK/libfuzzer > /dev/null
 
-$CXX $CXXFLAGS $SANITIZER_FLAGS -std=c++11 -O2 -c \
-    $SRC/libfuzzer/dataflow/*.cpp
-ar r $LIB_FUZZING_ENGINE $WORK/libfuzzer/*.o
+$CXX $CXXFLAGS -fno-sanitize=all $SANITIZER_FLAGS -std=c++11 -O2 -c \
+    $SRC/libfuzzer/dataflow/DataFlow.cpp
+$CXX $CXXFLAGS -fno-sanitize=all -fPIC -std=c++11 -O2 -c \
+    $SRC/libfuzzer/dataflow/DataFlowCallbacks.cpp
+
+cp $WORK/libfuzzer/DataFlow*.o /usr/lib/
+
 popd > /dev/null
 rm -rf $WORK/libfuzzer
 echo " done."
diff --git a/infra/base-images/base-builder/detect_repo.py b/infra/base-images/base-builder/detect_repo.py
new file mode 100644
index 0000000..272a915
--- /dev/null
+++ b/infra/base-images/base-builder/detect_repo.py
@@ -0,0 +1,156 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module to get the the name of a git repo containing a specific commit
+inside of an OSS-Fuzz project.
+
+Example Usage:
+
+  python detect_repo.py --src_dir /src --example_commit
+    b534f03eecd8a109db2b085ab24d419b6486de97
+
+Prints the location of the git remote repo as well as the repo's name
+seperated by a space.
+
+  https://github.com/VirusTotal/yara.git yara
+
+"""
+import argparse
+import logging
+import os
+import subprocess
+
+
+def main():
+  """Function to get a git repo's url and name referenced by OSS-Fuzz
+  Dockerfile.
+
+  Raises:
+    ValueError when a commit or a ref is not provided.
+  """
+  parser = argparse.ArgumentParser(
+      description=
+      'Finds a specific git repo in an oss-fuzz project\'s docker file.')
+  parser.add_argument('--repo_name', help='The name of the git repo.')
+  parser.add_argument('--src_dir', help='The location of the possible repo.')
+  parser.add_argument('--example_commit',
+                      help='A commit SHA referencing the project\'s main repo.')
+
+  args = parser.parse_args()
+  if not args.repo_name and not args.example_commit:
+    raise ValueError(
+        'Requires an example commit or a repo name to find repo location.')
+  if args.src_dir:
+    src_dir = args.src_dir
+  else:
+    src_dir = os.environ.get('SRC', '/src')
+
+  for single_dir in os.listdir(src_dir):
+    full_path = os.path.join(src_dir, single_dir)
+    if not os.path.isdir(full_path):
+      continue
+    if args.example_commit and check_for_commit(full_path, args.example_commit):
+      print('Detected repo:', get_repo(full_path), full_path)
+      return
+    if args.repo_name and check_for_repo_name(full_path, args.repo_name):
+      print('Detected repo:', get_repo(full_path), full_path)
+      return
+  logging.error('No git repos with specific commit: %s found in %s',
+                args.example_commit, src_dir)
+
+
+def get_repo(repo_path):
+  """Gets a git repo link from a specific directory in a docker image.
+
+  Args:
+    repo_path: The directory on the image where the git repo exists.
+
+  Returns:
+    The repo location or None.
+  """
+  output, return_code = execute(['git', 'config', '--get', 'remote.origin.url'],
+                                location=repo_path,
+                                check_result=True)
+  if return_code == 0 and output:
+    return output.rstrip()
+  return None
+
+
+def check_for_repo_name(repo_path, repo_name):
+  """Check to see if the repo_name matches the remote repository repo name.
+
+  Args:
+    repo_path: The directory of the git repo.
+    repo_name: The name of the target git repo.
+  """
+  if not os.path.exists(os.path.join(repo_path, '.git')):
+    return False
+
+  out, _ = execute(['git', 'config', '--get', 'remote.origin.url'],
+                   location=repo_path)
+  out = out.split('/')[-1].replace('.git', '').rstrip()
+  return out == repo_name
+
+
+def check_for_commit(repo_path, commit):
+  """Checks a directory for a specific commit.
+
+  Args:
+    repo_path: The name of the directory to test for the commit.
+    commit: The commit SHA to check for.
+
+  Returns:
+    True if directory contains that commit.
+  """
+
+  # Check if valid git repo.
+  if not os.path.exists(os.path.join(repo_path, '.git')):
+    return False
+
+  # Check if history fetch is needed.
+  if os.path.exists(os.path.join(repo_path, '.git', 'shallow')):
+    execute(['git', 'fetch', '--unshallow'], location=repo_path)
+
+  # Check if commit is in history.
+  _, return_code = execute(['git', 'cat-file', '-e', commit],
+                           location=repo_path)
+  return return_code == 0
+
+
+def execute(command, location, check_result=False):
+  """Runs a shell command in the specified directory location.
+
+  Args:
+    command: The command as a list to be run.
+    location: The directory the command is run in.
+    check_result: Should an exception be thrown on failed command.
+
+  Returns:
+    The stdout of the command, the error code.
+
+  Raises:
+    RuntimeError: running a command resulted in an error.
+  """
+  process = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=location)
+  output, err = process.communicate()
+  if check_result and (process.returncode or err):
+    raise RuntimeError(
+        'Error: %s\n running command: %s\n return code: %s\n out %s\n' %
+        (err, command, process.returncode, output))
+  if output is not None:
+    output = output.decode('ascii')
+  return output, process.returncode
+
+
+if __name__ == '__main__':
+  main()
diff --git a/infra/base-images/base-builder/detect_repo_test.py b/infra/base-images/base-builder/detect_repo_test.py
new file mode 100644
index 0000000..4886522
--- /dev/null
+++ b/infra/base-images/base-builder/detect_repo_test.py
@@ -0,0 +1,92 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test the functionality of the detect_repo module.
+This will consist of the following functional test:
+  1. Determine if an OSS-Fuzz project's main repo can be detected from example
+  commits.
+  2. Determine if an OSS-Fuzz project main repo can be detected from a
+  repo name.
+"""
+import os
+import re
+import sys
+import tempfile
+import unittest
+
+import detect_repo
+
+# Appending to path for access to repo_manager module.
+# pylint: disable=wrong-import-position
+sys.path.append(
+    os.path.dirname(os.path.dirname(os.path.dirname(
+        os.path.abspath(__file__)))))
+import repo_manager
+import test_repos
+# pylint: enable=wrong-import-position
+
+
+class DetectRepoIntegrationTest(unittest.TestCase):
+  """Class to test the functionality of the detect_repo module."""
+
+  def test_infer_main_repo_from_commit(self):
+    """Tests that the main repo can be inferred based on an example commit."""
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      # Construct example repo's to check for commits.
+      for example_repo in test_repos.TEST_REPOS:
+        repo_manager.RepoManager(example_repo.git_url, tmp_dir)
+        self.check_with_repo(example_repo.git_url,
+                             example_repo.git_repo_name,
+                             tmp_dir,
+                             commit=example_repo.old_commit)
+
+  def test_infer_main_repo_from_name(self):
+    """Tests that the main project repo can be inferred from a repo name."""
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      for example_repo in test_repos.TEST_REPOS:
+        repo_manager.RepoManager(example_repo.git_url, tmp_dir)
+        self.check_with_repo(example_repo.git_url, example_repo.git_repo_name,
+                             tmp_dir)
+
+  def check_with_repo(self, repo_origin, repo_name, tmp_dir, commit=None):
+    """Checks the detect repo's main method for a specific set of inputs.
+
+    Args:
+      repo_origin: URL of the git repo.
+      repo_name: The name of the directory it is cloned to.
+      tmp_dir: The location of the directory of git repos to be searched.
+      commit: The commit that should be used to look up the repo.
+    """
+    command = ['python3', 'detect_repo.py', '--src_dir', tmp_dir]
+
+    if commit:
+      command += ['--example_commit', commit]
+    else:
+      command += ['--repo_name', repo_name]
+
+    out, _ = detect_repo.execute(command,
+                                 location=os.path.dirname(
+                                     os.path.realpath(__file__)))
+    match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
+    if match and match.group(1) and match.group(2):
+      self.assertEqual(match.group(1), repo_origin)
+      self.assertEqual(match.group(2), os.path.join(tmp_dir, repo_name))
+    else:
+      self.assertIsNone(repo_origin)
+      self.assertIsNone(repo_name)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/infra/base-images/base-builder/precompile_honggfuzz b/infra/base-images/base-builder/precompile_honggfuzz
index b80b2ea..152922d 100755
--- a/infra/base-images/base-builder/precompile_honggfuzz
+++ b/infra/base-images/base-builder/precompile_honggfuzz
@@ -41,4 +41,5 @@
 popd > /dev/null
 
 apt-get remove -y --purge ${PACKAGES[@]}
+apt-get autoremove -y
 echo " done."
diff --git a/infra/base-images/base-runner/Dockerfile b/infra/base-images/base-runner/Dockerfile
index 0be5810..ea29e1f 100644
--- a/infra/base-images/base-runner/Dockerfile
+++ b/infra/base-images/base-runner/Dockerfile
@@ -40,8 +40,10 @@
 RUN pip3 install -r /opt/code_coverage/requirements.txt
 
 COPY bad_build_check \
+    collect_dft \
     coverage \
     coverage_helper \
+    dataflow_tracer.py \
     download_corpus \
     minijail0 \
     reproduce \
@@ -49,6 +51,7 @@
     run_minijail \
     targets_list \
     test_all \
+    test_one \
     /usr/local/bin/
 
 # Default environment options for various sanitizers.
@@ -58,5 +61,5 @@
 ENV ASAN_OPTIONS="alloc_dealloc_mismatch=0:allocator_may_return_null=1:allocator_release_to_os_interval_ms=500:check_malloc_usable_size=0:detect_container_overflow=1:detect_odr_violation=0:detect_leaks=1:detect_stack_use_after_return=1:fast_unwind_on_fatal=0:handle_abort=1:handle_segv=1:handle_sigill=1:max_uar_stack_size_log=16:print_scariness=1:quarantine_size_mb=10:strict_memcmp=1:strip_path_prefix=/workspace/:symbolize=1:use_sigaltstack=1"
 ENV MSAN_OPTIONS="print_stats=1:strip_path_prefix=/workspace/:symbolize=1"
 ENV UBSAN_OPTIONS="print_stacktrace=1:print_summary=1:silence_unsigned_overflow=1:strip_path_prefix=/workspace/:symbolize=1"
-ENV FUZZER_ARGS="-rss_limit_mb=2048 -timeout=25"
+ENV FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25"
 ENV AFL_FUZZER_ARGS="-m none"
diff --git a/infra/base-images/base-runner/bad_build_check b/infra/base-images/base-runner/bad_build_check
index 284d29b..c3fa68c 100755
--- a/infra/base-images/base-runner/bad_build_check
+++ b/infra/base-images/base-runner/bad_build_check
@@ -23,6 +23,10 @@
 # Mercurial's bdiff_fuzzer has 116 PCs when built with ASan.
 THRESHOLD_FOR_NUMBER_OF_EDGES=100
 
+# A fuzz target is supposed to have at least two functions, such as
+# LLVMFuzzerTestOneInput and an API that is being called from there.
+THRESHOLD_FOR_NUMBER_OF_FUNCTIONS=2
+
 # Threshold values for different sanitizers used by instrumentation checks.
 ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD=1000
 ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD=0
@@ -95,8 +99,14 @@
       return 1
     fi
   elif [[ "$FUZZING_ENGINE" == dataflow ]]; then
-    # TODO(https://github.com/google/oss-fuzz/issues/1632): add check for
-    # binaries compiled with dataflow engine when the interface becomes stable.
+    $FUZZER &> $FUZZER_OUTPUT
+    local NUMBER_OF_FUNCTIONS=$(grep -Po "INFO:\s+\K[[:digit:]]+(?=\s+instrumented function.*)" $FUZZER_OUTPUT)
+    [[ -z "$NUMBER_OF_FUNCTIONS" ]] && NUMBER_OF_FUNCTIONS=0
+    if (( $NUMBER_OF_FUNCTIONS < $THRESHOLD_FOR_NUMBER_OF_FUNCTIONS )); then
+      echo "BAD BUILD: $FUZZER does not seem to be properly built in 'dataflow' config."
+      cat $FUZZER_OUTPUT
+      return 1
+    fi
     return 0
   fi
 
@@ -330,7 +340,7 @@
 
   # Set up common fuzzing arguments, otherwise "run_fuzzer" errors out.
   if [ -z "$FUZZER_ARGS" ]; then
-    export FUZZER_ARGS="-rss_limit_mb=2048 -timeout=25"
+    export FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25"
   fi
 
   bash -c "run_fuzzer $FUZZER_NAME -runs=0" &> $FUZZER_OUTPUT
diff --git a/infra/base-images/base-runner/collect_dft b/infra/base-images/base-runner/collect_dft
new file mode 100755
index 0000000..e316c0d
--- /dev/null
+++ b/infra/base-images/base-runner/collect_dft
@@ -0,0 +1,65 @@
+#!/bin/bash -u
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+cd $OUT
+
+if (( $# > 0 )); then
+  FUZZ_TARGETS="$@"
+else
+  FUZZ_TARGETS="$(find . -maxdepth 1 -type f -executable -printf '%P\n')"
+fi
+
+# Timeout for running a single fuzz target.
+TIMEOUT=1h
+
+# Number of CPUs available, this is needed for running targets in parallel.
+NPROC=$(nproc)
+
+function run_one_target {
+  local target=$1
+  local corpus="/corpus/${target}"
+  local traces="$OUT/${target}_dft"
+
+  # Put the logs in $OUT as well for debugging purposes.
+  local log="$OUT/${target}_dft.log"
+
+  rm -rf $traces && mkdir -p $traces
+
+  timeout $TIMEOUT dataflow_tracer.py $OUT/$target $corpus $traces &> $log
+  if (( $? != 0 )); then
+    echo "Error occurred while collecting data flow traces for $target:"
+    cat $log
+  fi
+}
+
+# Run each fuzz target, write data flow traces into corresponding dir in $OUT.
+for fuzz_target in $FUZZ_TARGETS; do
+  # Skip binaries that do not seem to be fuzz targets.
+  grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
+  
+  echo "Running $fuzz_target"
+  run_one_target $fuzz_target &
+
+  # Do not spawn more processes than the number of CPUs available.
+  n_child_proc=$(jobs -rp | wc -l)
+  while [ "$n_child_proc" -eq "$NPROC" ]; do
+    sleep 4
+    n_child_proc=$(jobs -rp | wc -l)
+  done
+done
+
+# Wait for background processes to finish.
+wait
diff --git a/infra/base-images/base-runner/dataflow_tracer.py b/infra/base-images/base-runner/dataflow_tracer.py
new file mode 100755
index 0000000..7166bf4
--- /dev/null
+++ b/infra/base-images/base-runner/dataflow_tracer.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Script for collecting dataflow traces using DFSan compiled binary. The script
+imitates `CollectDataFlow` function from libFuzzer but provides some flexibility
+for skipping long and/or slow corpus elements.
+
+Follow https://github.com/google/oss-fuzz/issues/1632 for more details."""
+import hashlib
+import os
+import subprocess
+import sys
+
+# These can be controlled by the runner in order to change the values without
+# rebuilding OSS-Fuzz base images.
+FILE_SIZE_LIMIT = int(os.getenv('DFT_FILE_SIZE_LIMIT', 32 * 1024))
+MIN_TIMEOUT = float(os.getenv('DFT_MIN_TIMEOUT', 1.0))
+TIMEOUT_RANGE = float(os.getenv('DFT_TIMEOUT_RANGE', 3.0))
+
+DFSAN_OPTIONS = 'fast16labels=1:warn_unimplemented=0'
+
+
+def _error(msg):
+  sys.stderr.write(msg + '\n')
+
+
+def _list_dir(dirpath):
+  for root, _, files in os.walk(dirpath):
+    for f in files:
+      yield os.path.join(root, f)
+
+
+def _sha1(filepath):
+  h = hashlib.sha1()
+  with open(filepath, 'rb') as f:
+    h.update(f.read())
+  return h.hexdigest()
+
+
+def _run(cmd, timeout=None):
+  result = None
+  try:
+    result = subprocess.run(cmd,
+                            timeout=timeout,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    if result.returncode:
+      _error('{command} finished with non-zero code: {code}'.format(
+          command=str(cmd), code=result.returncode))
+
+  except subprocess.TimeoutExpired:
+    raise
+  except Exception as e:
+    _error('Exception: ' + str(e))
+
+  return result
+
+
+def _timeout(size):
+  # Dynamic timeout value (proportional to file size) to discard slow units.
+  timeout = MIN_TIMEOUT
+  timeout += size * TIMEOUT_RANGE / FILE_SIZE_LIMIT
+  return timeout
+
+
+def collect_traces(binary, corpus_dir, dft_dir):
+  stats = {
+      'total': 0,
+      'traced': 0,
+      'long': 0,
+      'slow': 0,
+      'failed': 0,
+  }
+
+  files_and_sizes = {}
+  for f in _list_dir(corpus_dir):
+    stats['total'] += 1
+    size = os.path.getsize(f)
+    if size > FILE_SIZE_LIMIT:
+      stats['long'] += 1
+      print('Skipping large file ({size}b): {path}'.format(size=size, path=f))
+      continue
+    files_and_sizes[f] = size
+
+  for f in sorted(files_and_sizes, key=files_and_sizes.get):
+    output_path = os.path.join(dft_dir, _sha1(f))
+    try:
+      result = _run([binary, f, output_path], timeout=_timeout(files_and_sizes[f]))
+      if result.returncode:
+        stats['failed'] += 1
+      else:
+        stats['traced'] += 1
+
+    except subprocess.TimeoutExpired as e:
+      _error('Slow input: ' + str(e))
+      stats['slow'] += 1
+
+  return stats
+
+
+def dump_functions(binary, dft_dir):
+  result = _run([binary])
+  if not result or result.returncode:
+    return False
+
+  with open(os.path.join(dft_dir, 'functions.txt'), 'wb') as f:
+    f.write(result.stdout)
+
+  return True
+
+
+def main():
+  if len(sys.argv) < 4:
+    _error('Usage: {0} <binary> <corpus_dir> <dft_dir>'.format(sys.argv[0]))
+    sys.exit(1)
+
+  binary = sys.argv[1]
+  corpus_dir = sys.argv[2]
+  dft_dir = sys.argv[3]
+
+  os.environ['DFSAN_OPTIONS'] = DFSAN_OPTIONS
+
+  if not dump_functions(binary, dft_dir):
+    _error('Failed to dump functions. Something is wrong.')
+    sys.exit(1)
+
+  stats = collect_traces(binary, corpus_dir, dft_dir)
+  for k, v in stats.items():
+    print('{0}: {1}'.format(k, v))
+
+  # Checksum that we didn't lose track of any of the inputs.
+  assert stats['total'] * 2 == sum(v for v in stats.values())
+  sys.exit(0)
+
+
+if __name__ == "__main__":
+  main()
diff --git a/infra/base-images/base-runner/run_fuzzer b/infra/base-images/base-runner/run_fuzzer
index e3a85a5..37785db 100755
--- a/infra/base-images/base-runner/run_fuzzer
+++ b/infra/base-images/base-runner/run_fuzzer
@@ -104,4 +104,9 @@
 fi
 
 echo $CMD_LINE
+
+# Unset OUT so the fuzz target can't rely on it.
+unset OUT
+
 bash -c "$CMD_LINE"
+
diff --git a/infra/base-images/base-runner/test_all b/infra/base-images/base-runner/test_all
index 2e49b75..1cc45ca 100755
--- a/infra/base-images/base-runner/test_all
+++ b/infra/base-images/base-runner/test_all
@@ -32,8 +32,21 @@
 mkdir $VALID_TARGETS_DIR
 mkdir $BROKEN_TARGETS_DIR
 
+# Move the directory the fuzzer is located in to somewhere that doesn't exist
+# on the builder to make it more likely that hardcoding /out fails here (since
+# it will fail on ClusterFuzz).
+TMP_FUZZER_DIR=/tmp/not-out
+rm -rf $TMP_FUZZER_DIR
+mkdir $TMP_FUZZER_DIR
+# Move contents of $OUT/ into $TMP_FUZZER_DIR. We can't move the directory
+# itself because it is a mount.
+mv $OUT/* $TMP_FUZZER_DIR
+INITIAL_OUT=$OUT
+export OUT=$TMP_FUZZER_DIR
+
+
 # Main loop that iterates through all fuzz targets and runs the check.
-for FUZZER_BINARY in $(find $OUT/ -maxdepth 1 -executable -type f); do
+for FUZZER_BINARY in $(find $TMP_FUZZER_DIR -maxdepth 1 -executable -type f); do
   if file "$FUZZER_BINARY" | grep -v ELF > /dev/null 2>&1; then
     continue
   fi
@@ -73,6 +86,10 @@
 # Wait for background processes to finish.
 wait
 
+# Restore OUT
+export OUT=$INITIAL_OUT
+mv $TMP_FUZZER_DIR/* $OUT
+
 # Sanity check in case there are no fuzz targets in the $OUT/ dir.
 if [ "$TOTAL_TARGETS_COUNT" -eq "0" ]; then
   echo "ERROR: no fuzzers found in $OUT/"
diff --git a/infra/base-images/base-runner/test_one b/infra/base-images/base-runner/test_one
new file mode 100755
index 0000000..23b7fd9
--- /dev/null
+++ b/infra/base-images/base-runner/test_one
@@ -0,0 +1,58 @@
+#!/bin/bash -u
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Wrapper around bad_build_check that moves the /out directory to /tmp/not-out.
+# This is useful when bad_build_check isn't called from test_all which does the
+# same thing.
+
+function main {
+  # Move the directory the fuzzer is located in to somewhere that doesn't exist
+  # on the builder to make it more likely that hardcoding /out fails here (since
+  # it will fail on ClusterFuzz).
+  local fuzzer=$1
+  fuzzer=$(realpath $fuzzer)
+  local initial_fuzzer_dir=$(dirname $fuzzer)
+
+  local tmp_fuzzer_dir=/tmp/not-out
+  rm -rf $tmp_fuzzer_dir
+  mkdir $tmp_fuzzer_dir
+  # Move the contents of $initial_fuzzer_dir rather than the directory itself in
+  # case it is a mount.
+  mv $initial_fuzzer_dir/* $tmp_fuzzer_dir
+  fuzzer="$tmp_fuzzer_dir/$(basename $fuzzer)"
+
+  # Change OUT to the temporary fuzzer dir.
+  local initial_out=$OUT
+  export OUT=$tmp_fuzzer_dir
+
+  bad_build_check $fuzzer
+  returncode=$?
+
+  # Restore OUT and $initial_fuzzer_dir
+  export OUT=$initial_out
+  mv $tmp_fuzzer_dir/* $initial_fuzzer_dir
+
+  return $returncode
+}
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 <fuzz_target_binary>"
+  exit 1
+fi
+
+main $1
+exit $?
diff --git a/infra/bisector.py b/infra/bisector.py
index 8771105..d1fd669 100644
--- a/infra/bisector.py
+++ b/infra/bisector.py
@@ -18,7 +18,7 @@
 This is done with the following steps:
 
 
-  NOTE: NEEDS TO BE RUN FROM THE OSS-Fuzz HOME directory
+  NOTE: Needs to be run from root of the OSS-Fuzz source checkout.
 
   Typical usage example:
         python3 infra/bisector.py
@@ -31,122 +31,124 @@
 """
 
 import argparse
-from dataclasses import dataclass
-import os
+import logging
 import tempfile
 
 import build_specified_commit
 import helper
 import repo_manager
-
-
-@dataclass
-class BuildData():
-  """List of data requried for bisection of errors in OSS-Fuzz projects.
-
-  Attributes:
-    project_name: The name of the OSS-Fuzz project that is being checked
-    engine: The fuzzing engine to be used
-    sanitizer: The fuzzing sanitizer to be used
-    architecture: The system architecture being fuzzed
-  """
-  project_name: str
-  engine: str
-  sanitizer: str
-  architecture: str
+import utils
 
 
 def main():
   """Finds the commit SHA where an error was initally introduced."""
+  utils.chdir_to_root()
   parser = argparse.ArgumentParser(
       description='git bisection for finding introduction of bugs')
 
-  parser.add_argument(
-      '--project_name',
-      help='The name of the project where the bug occured',
-      required=True)
-  parser.add_argument(
-      '--commit_new',
-      help='The newest commit SHA to be bisected',
-      required=True)
-  parser.add_argument(
-      '--commit_old',
-      help='The oldest commit SHA to be bisected',
-      required=True)
-  parser.add_argument(
-      '--fuzz_target', help='the name of the fuzzer to be built', required=True)
-  parser.add_argument(
-      '--testcase', help='the testcase to be reproduced', required=True)
-  parser.add_argument('--engine', default='libfuzzer')
-  parser.add_argument(
-      '--sanitizer',
-      default='address',
-      help='the default is "address"; "dataflow" for "dataflow" engine')
+  parser.add_argument('--project_name',
+                      help='The name of the project where the bug occurred.',
+                      required=True)
+  parser.add_argument('--new_commit',
+                      help='The newest commit SHA to be bisected.',
+                      required=True)
+  parser.add_argument('--old_commit',
+                      help='The oldest commit SHA to be bisected.',
+                      required=True)
+  parser.add_argument('--fuzz_target',
+                      help='The name of the fuzzer to be built.',
+                      required=True)
+  parser.add_argument('--test_case_path',
+                      help='The path to test case.',
+                      required=True)
+  parser.add_argument('--engine',
+                      help='The default is "libfuzzer".',
+                      default='libfuzzer')
+  parser.add_argument('--sanitizer',
+                      default='address',
+                      help='The default is "address".')
   parser.add_argument('--architecture', default='x86_64')
   args = parser.parse_args()
-  build_data = BuildData(args.project_name, args.engine, args.sanitizer,
-                         args.architecture)
-  if os.getcwd() != os.path.dirname(
-      os.path.dirname(os.path.realpath(__file__))):
-    print("Error: bisector.py needs to be run from the OSS-Fuzz home directory")
-    return 1
-  error_sha = bisect(args.commit_old, args.commit_new, args.testcase,
+
+  build_data = build_specified_commit.BuildData(project_name=args.project_name,
+                                                engine=args.engine,
+                                                sanitizer=args.sanitizer,
+                                                architecture=args.architecture)
+
+  error_sha = bisect(args.old_commit, args.new_commit, args.test_case_path,
                      args.fuzz_target, build_data)
   if not error_sha:
-    print('No error was found in commit range %s:%s' %
-          (args.commit_old, args.commit_new))
+    logging.error('No error was found in commit range %s:%s', args.old_commit,
+                  args.new_commit)
+    return 1
+  if error_sha == args.old_commit:
+    logging.error(
+        'Bisection Error: Both the first and the last commits in '
+        'the given range have the same behavior, bisection is not possible.')
     return 1
   print('Error was introduced at commit %s' % error_sha)
   return 0
 
 
-def bisect(commit_old, commit_new, testcase, fuzz_target, build_data):
+def bisect(old_commit, new_commit, test_case_path, fuzz_target, build_data):
   """From a commit range, this function caluclates which introduced a
-  specific error from a fuzz testcase.
+  specific error from a fuzz test_case_path.
 
   Args:
-    commit_old: The oldest commit in the error regression range
-    commit_new: The newest commit in the error regression range
-    testcase: The file path of the test case that triggers the error
-    fuzz_target: The name of the fuzzer to be tested
-    build_data: a class holding all of the input parameters for bisection
+    old_commit: The oldest commit in the error regression range.
+    new_commit: The newest commit in the error regression range.
+    test_case_path: The file path of the test case that triggers the error
+    fuzz_target: The name of the fuzzer to be tested.
+    build_data: a class holding all of the input parameters for bisection.
 
   Returns:
-    The commit SHA that introduced the error or None
-  """
-  local_store_path = tempfile.mkdtemp()
-  repo_url = build_specified_commit.infer_main_repo(build_data.project_name,
-                                                    local_store_path,
-                                                    commit_old)
-  bisect_repo_manager = repo_manager.RepoManager(repo_url, local_store_path)
-  commit_list = bisect_repo_manager.get_commit_list(commit_old, commit_new)
-  build_specified_commit.build_fuzzer_from_commit(
-      build_data.project_name, commit_list[0], bisect_repo_manager.repo_dir,
-      build_data.engine, build_data.sanitizer, build_data.architecture,
-      bisect_repo_manager)
-  error_code = helper.reproduce_impl(build_data.project_name, fuzz_target,
-                                     False, [], [], testcase)
-  old_idx = len(commit_list) - 1
-  new_idx = 0
-  if len(commit_list) == 1:
-    if not error_code:
-      return None
-    return commit_list[0]
+    The commit SHA that introduced the error or None.
 
-  while old_idx - new_idx != 1:
-    curr_idx = (old_idx + new_idx) // 2
-    build_specified_commit.build_fuzzer_from_commit(
-        build_data.project_name, commit_list[curr_idx],
-        bisect_repo_manager.repo_dir, build_data.engine, build_data.sanitizer,
-        build_data.architecture, bisect_repo_manager)
-    error_exists = (
-        helper.reproduce_impl(build_data.project_name, fuzz_target, False, [],
-                              [], testcase) == error_code)
-    if error_exists == error_code:
-      new_idx = curr_idx
-    else:
-      old_idx = curr_idx
-  return commit_list[new_idx]
+  Raises:
+    ValueError: when a repo url can't be determined from the project.
+  """
+  with tempfile.TemporaryDirectory() as tmp_dir:
+    repo_url, repo_name = build_specified_commit.detect_main_repo(
+        build_data.project_name, commit=old_commit)
+    if not repo_url or not repo_name:
+      raise ValueError('Main git repo can not be determined.')
+    bisect_repo_manager = repo_manager.RepoManager(repo_url,
+                                                   tmp_dir,
+                                                   repo_name=repo_name)
+    commit_list = bisect_repo_manager.get_commit_list(old_commit, new_commit)
+    old_idx = len(commit_list) - 1
+    new_idx = 0
+    build_specified_commit.build_fuzzers_from_commit(commit_list[new_idx],
+                                                     bisect_repo_manager,
+                                                     build_data)
+    expected_error_code = helper.reproduce_impl(build_data.project_name,
+                                                fuzz_target, False, [], [],
+                                                test_case_path)
+
+    # Check if the error is persistent through the commit range
+    build_specified_commit.build_fuzzers_from_commit(
+        commit_list[old_idx],
+        bisect_repo_manager,
+        build_data,
+    )
+
+    if expected_error_code == helper.reproduce_impl(build_data.project_name,
+                                                    fuzz_target, False, [], [],
+                                                    test_case_path):
+      return commit_list[old_idx]
+
+    while old_idx - new_idx > 1:
+      curr_idx = (old_idx + new_idx) // 2
+      build_specified_commit.build_fuzzers_from_commit(commit_list[curr_idx],
+                                                       bisect_repo_manager,
+                                                       build_data)
+      error_code = helper.reproduce_impl(build_data.project_name, fuzz_target,
+                                         False, [], [], test_case_path)
+      if expected_error_code == error_code:
+        new_idx = curr_idx
+      else:
+        old_idx = curr_idx
+    return commit_list[new_idx]
 
 
 if __name__ == '__main__':
diff --git a/infra/bisector_test.py b/infra/bisector_test.py
new file mode 100644
index 0000000..89d483e
--- /dev/null
+++ b/infra/bisector_test.py
@@ -0,0 +1,66 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test the functionality of bisection module:
+1) Test a known case where an error appears in a regression range.
+2) Bisect can handle incorrect inputs.
+
+IMPORTANT: This test needs to be run with root privileges.
+"""
+
+import os
+import unittest
+
+import bisector
+import build_specified_commit
+import test_repos
+
+# Necessary because __file__ changes with os.chdir
+TEST_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
+
+
+class BisectIntegrationTests(unittest.TestCase):
+  """Class to test the functionality of bisection method."""
+
+  def test_bisect_invalid_repo(self):
+    """Test the bisection method on a project that does not exist."""
+    test_repo = test_repos.INVALID_REPO
+    build_data = build_specified_commit.BuildData(
+        project_name=test_repo.project_name,
+        engine='libfuzzer',
+        sanitizer='address',
+        architecture='x86_64')
+    with self.assertRaises(ValueError):
+      bisector.bisect(test_repo.old_commit, test_repo.new_commit,
+                      test_repo.test_case_path, test_repo.fuzz_target,
+                      build_data)
+
+  def test_bisect(self):
+    """Test the bisect method on example projects."""
+    for test_repo in test_repos.TEST_REPOS:
+      build_data = build_specified_commit.BuildData(
+          project_name=test_repo.project_name,
+          engine='libfuzzer',
+          sanitizer='address',
+          architecture='x86_64')
+      error_sha = bisector.bisect(test_repo.old_commit, test_repo.new_commit,
+                                  test_repo.test_case_path,
+                                  test_repo.fuzz_target, build_data)
+      self.assertEqual(error_sha, test_repo.intro_commit)
+
+
+if __name__ == '__main__':
+  # Change to oss-fuzz main directory so helper.py runs correctly.
+  if os.getcwd() != os.path.dirname(TEST_DIR_PATH):
+    os.chdir(os.path.dirname(TEST_DIR_PATH))
+  unittest.main()
diff --git a/infra/build_specified_commit.py b/infra/build_specified_commit.py
old mode 100755
new mode 100644
index c7a4c63..823fd48
--- a/infra/build_specified_commit.py
+++ b/infra/build_specified_commit.py
@@ -17,79 +17,79 @@
 from a specific point in time. This feature can be used for implementations
 like continuious integration fuzzing and bisection to find errors
 """
+import os
+import collections
+import logging
 import re
 
 import helper
-import repo_manager
+import utils
+
+BuildData = collections.namedtuple(
+    'BuildData', ['project_name', 'engine', 'sanitizer', 'architecture'])
 
 
-def build_fuzzer_from_commit(project_name,
-                             commit,
-                             local_store_path,
-                             engine='libfuzzer',
-                             sanitizer='address',
-                             architecture='x86_64',
-                             old_repo_manager=None):
+def build_fuzzers_from_commit(commit, build_repo_manager, build_data):
+  """Builds an OSS-Fuzz fuzzer at a specific commit SHA.
 
   Args:
-    project_name: The OSS-Fuzz project name
-    commit: The commit SHA to build the fuzzers at
-    local_store_path: The full file path of a place where a temp git repo is stored
-    engine: The fuzzing engine to be used
-    sanitizer: The fuzzing sanitizer to be used
-    architecture: The system architiecture to be used for fuzzing
-
+    commit: The commit SHA to build the fuzzers at.
+    build_repo_manager: The OSS-Fuzz project's repo manager to be built at.
+    build_data: A struct containing project build information.
   Returns:
-    0 on successful build 1 on failure
+    0 on successful build or error code on failure.
   """
-  if not old_repo_manager:
-    inferred_url = infer_main_repo(project_name, local_store_path, commit)
-    old_repo_manager = repo_manager.RepoManager(inferred_url, local_store_path)
-  old_repo_manager.checkout_commit(commit)
-  return helper.build_fuzzers_impl(
-      project_name=project_name,
-      clean=True,
-      engine=engine,
-      sanitizer=sanitizer,
-      architecture=architecture,
-      env_to_add=None,
-      source_path=old_repo_manager.repo_dir)
+  build_repo_manager.checkout_commit(commit)
+  return helper.build_fuzzers_impl(project_name=build_data.project_name,
+                                   clean=True,
+                                   engine=build_data.engine,
+                                   sanitizer=build_data.sanitizer,
+                                   architecture=build_data.architecture,
+                                   env_to_add=None,
+                                   source_path=build_repo_manager.repo_dir,
+                                   mount_location=os.path.join(
+                                       '/src', build_repo_manager.repo_name))
 
 
-def infer_main_repo(project_name, local_store_path, example_commit=None):
-  """Tries to guess the main repo a project based on the Dockerfile.
+def detect_main_repo(project_name, repo_name=None, commit=None):
+  """Checks a docker image for the main repo of an OSS-Fuzz project.
 
-  NOTE: This is a fragile implementation and only works for git
+  Note: The default is to use the repo name to detect the main repo.
+
   Args:
-    project_name: The OSS-Fuzz project that you are checking the repo of
-    example_commit: A commit that is in the main repos tree
+    project_name: The name of the oss-fuzz project.
+    repo_name: The name of the main repo in an OSS-Fuzz project.
+    commit: A commit SHA that is associated with the main repo.
+      Note: commit is only used when repo_name is not provided.
+
   Returns:
-    The guessed repo url path or None on failue
+    The repo's origin, the repo's path.
   """
-  if not helper.check_project_exists(project_name):
-    return None
-  docker_path = helper.get_dockerfile_path(project_name)
-  with open(docker_path, 'r') as file_path:
-    lines = file_path.read()
-    # Use generic git format and project name to guess main repo
-    if example_commit is None:
-      repo_url = re.search(
-          r'\b(?:http|https|git)://[^ ]*' + re.escape(project_name) +
-          r'(.git)?', lines)
-      if repo_url:
-        return repo_url.group(0)
-    else:
-      # Use example commit SHA to guess main repo
-      for clone_command in re.findall('.*clone.*', lines):
-        repo_url = re.search(r'\b(?:https|http|git)://[^ ]*',
-                             clone_command).group(0)
-        print(repo_url)
-        try:
-          test_repo_manager = repo_manager.RepoManager(repo_url.rstrip(),
-                                                       local_store_path)
-          if test_repo_manager.commit_exists(example_commit):
-            return repo_url
-        except:
-          pass
-    return None
+
+  if not repo_name and not commit:
+    logging.error(
+        'Error: can not detect main repo without a repo_name or a commit.')
+    return None, None
+  if repo_name and commit:
+    logging.info(
+        'Both repo name and commit specific. Using repo name for detection.')
+
+  # Change to oss-fuzz main directory so helper.py runs correctly.
+  utils.chdir_to_root()
+  if not helper.build_image_impl(project_name):
+    logging.error('Error: building %s image failed.', project_name)
+    return None, None
+  docker_image_name = 'gcr.io/oss-fuzz/' + project_name
+  command_to_run = [
+      'docker', 'run', '--rm', '-t', docker_image_name, 'python3',
+      os.path.join('/src', 'detect_repo.py')
+  ]
+  if repo_name:
+    command_to_run.extend(['--repo_name', repo_name])
+  else:
+    command_to_run.extend(['--example_commit', commit])
+  out, _ = utils.execute(command_to_run)
+  match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
+  if match and match.group(1) and match.group(2):
+    return match.group(1), match.group(2)
+  return None, None
diff --git a/infra/build_specified_commit_test.py b/infra/build_specified_commit_test.py
index 77a0698..808bcc2 100644
--- a/infra/build_specified_commit_test.py
+++ b/infra/build_specified_commit_test.py
@@ -11,42 +11,28 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Test the functionality of the build image from state module.
-NOTE: THIS TEST NEEDS TO BE RUN FROM THE OSS-FUZZ BASE DIR
-The will consist of the following functional tests
-  1. The inferance of the main repo for a specific project
+"""Test the functionality of the build image from commit module.
+This test will consist of the following functional tests:
+  1. The inference of the main repo for a specific project.
+  2. The building of a project's fuzzers from a specific commit.
+
+IMPORTANT: This test needs to be run with root privileges.
 """
+import os
+import tempfile
 import unittest
 
 import build_specified_commit
 import helper
+import repo_manager
+import test_repos
 
-
-class BuildImageUnitTests(unittest.TestCase):
-  """Class to test the functionality of the build image from state module."""
-
-  def test_infer_main_repo(self):
-    """Tests that the main repo can be infered based on an example commit."""
-    infered_repo = build_specified_commit.infer_main_repo(
-        'curl', 'tmp', 'bc5d22c3dede2f04870c37aec9a50474c4b888ad')
-    self.assertEqual(infered_repo, 'https://github.com/curl/curl.git')
-    infered_repo = build_specified_commit.infer_main_repo('curl', 'tmp')
-    self.assertEqual(infered_repo, 'https://github.com/curl/curl.git')
-
-    infered_repo = build_specified_commit.infer_main_repo('usrsctp', 'tmp')
-    self.assertEqual(infered_repo, 'https://github.com/weinrank/usrsctp')
-    infered_repo = build_specified_commit.infer_main_repo(
-        'usrsctp', 'tmp', '4886aaa49fb90e479226fcfc3241d74208908232')
-    self.assertEqual(infered_repo, 'https://github.com/weinrank/usrsctp',
-                     '4886aaa49fb90e479226fcfc3241d74208908232')
-
-    infered_repo = build_specified_commit.infer_main_repo(
-        'not_a_project', 'tmp')
-    self.assertEqual(infered_repo, None)
+# Necessary because __file__ changes with os.chdir
+TEST_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
 
 
 class BuildImageIntegrationTests(unittest.TestCase):
-  """Testing if an image can be built from different states e.g. a commit"""
+  """Testing if an image can be built from different states e.g. a commit."""
 
   def test_build_fuzzers_from_commit(self):
     """Tests if the fuzzers can build at a proper commit.
@@ -55,21 +41,65 @@
     The old commit should show the error when its fuzzers run and the new one
     should not.
     """
-    project_name = 'yara'
-    old_commit = 'f79be4f2330f4b89ea2f42e1c44ca998c59a0c0f'
-    new_commit = 'f50a39051ea8c7f10d6d8db9656658b49601caef'
-    fuzzer = 'rules_fuzzer'
-    test_data = 'infra/yara_test_data'
-    build_specified_commit.build_fuzzer_from_commit(
-        project_name, old_commit, 'tmp', sanitizer='address')
-    old_error_code = helper.reproduce_impl(project_name, fuzzer, False, [], [],
-                                           test_data)
-    build_specified_commit.build_fuzzer_from_commit(
-        project_name, new_commit, 'tmp', sanitizer='address')
-    new_error_code = helper.reproduce_impl(project_name, fuzzer, False, [], [],
-                                           test_data)
-    self.assertNotEqual(new_error_code, old_error_code)
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_case = test_repos.TEST_REPOS[0]
+      test_repo_manager = repo_manager.RepoManager(
+          test_case.git_url, tmp_dir, repo_name=test_case.oss_repo_name)
+      build_data = build_specified_commit.BuildData(
+          sanitizer='address',
+          architecture='x86_64',
+          engine='libfuzzer',
+          project_name=test_case.project_name)
+
+      build_specified_commit.build_fuzzers_from_commit(test_case.old_commit,
+                                                       test_repo_manager,
+                                                       build_data)
+      old_error_code = helper.reproduce_impl(test_case.project_name,
+                                             test_case.fuzz_target, False, [],
+                                             [], test_case.test_case_path)
+      build_specified_commit.build_fuzzers_from_commit(test_case.new_commit,
+                                                       test_repo_manager,
+                                                       build_data)
+      new_error_code = helper.reproduce_impl(test_case.project_name,
+                                             test_case.fuzz_target, False, [],
+                                             [], test_case.test_case_path)
+      self.assertNotEqual(new_error_code, old_error_code)
+
+  def test_detect_main_repo_from_commit(self):
+    """Test detect_main_repo in build_specified_commit using a commit SHA."""
+    for example_repo in test_repos.TEST_REPOS:
+      repo_origin, repo_name = build_specified_commit.detect_main_repo(
+          example_repo.project_name, commit=example_repo.new_commit)
+      self.assertEqual(repo_origin, example_repo.git_url)
+      self.assertEqual(repo_name,
+                       os.path.join('/src', example_repo.oss_repo_name))
+
+    repo_origin, repo_name = build_specified_commit.detect_main_repo(
+        test_repos.INVALID_REPO.project_name,
+        test_repos.INVALID_REPO.new_commit)
+    self.assertIsNone(repo_origin)
+    self.assertIsNone(repo_name)
+
+  def test_detect_main_repo_from_name(self):
+    """Test detect_main_repo in build_specified_commit using a repo name."""
+    for example_repo in test_repos.TEST_REPOS:
+      repo_origin, repo_name = build_specified_commit.detect_main_repo(
+          example_repo.project_name, repo_name=example_repo.git_repo_name)
+      self.assertEqual(repo_origin, example_repo.git_url)
+      self.assertEqual(repo_name,
+                       os.path.join('/src', example_repo.oss_repo_name))
+
+    repo_origin, repo_name = build_specified_commit.detect_main_repo(
+        test_repos.INVALID_REPO.project_name,
+        test_repos.INVALID_REPO.oss_repo_name)
+    self.assertIsNone(repo_origin)
+    self.assertIsNone(repo_name)
 
 
 if __name__ == '__main__':
+
+  # Change to oss-fuzz main directory so helper.py runs correctly.
+  if os.getcwd() != os.path.dirname(TEST_DIR_PATH):
+    os.chdir(os.path.dirname(TEST_DIR_PATH))
   unittest.main()
diff --git a/infra/cifuzz/actions/Dockerfile b/infra/cifuzz/actions/Dockerfile
new file mode 100644
index 0000000..fe69d00
--- /dev/null
+++ b/infra/cifuzz/actions/Dockerfile
@@ -0,0 +1,44 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+# Docker image to run CIFuzz in.
+
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y git \
+    apt-transport-https \
+    ca-certificates \
+    curl \
+    gnupg2 \
+    software-properties-common \
+    python3
+
+
+RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - && apt-key fingerprint 0EBFCD88
+RUN add-apt-repository \
+   "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+   xenial \
+   stable"
+
+RUN apt-get update && apt-get install docker-ce docker-ce-cli containerd.io -y
+
+ENV OSS_FUZZ_ROOT=/opt/oss-fuzz
+RUN git clone https://github.com/google/oss-fuzz.git ${OSS_FUZZ_ROOT}
+
+# Copies your code file from  action repository to the container
+COPY entrypoint.py /opt/entrypoint.py
+
+# Python file to execute when the docker container starts up
+ENTRYPOINT ["python3", "/opt/entrypoint.py"]
diff --git a/infra/cifuzz/actions/action.yml b/infra/cifuzz/actions/action.yml
new file mode 100644
index 0000000..7af4bd4
--- /dev/null
+++ b/infra/cifuzz/actions/action.yml
@@ -0,0 +1,17 @@
+# action.yml
+name: 'build-fuzzers'
+description: "Builds an OSS-Fuzz project's fuzzers."
+inputs:
+  project-name:
+    description: 'Name of the corresponding OSS-Fuzz project.'
+    required: true
+  fuzz-seconds:
+    description: 'The total time allotted for fuzzing in seconds.'
+    required: true
+    default: 360
+runs:
+  using: 'docker'
+  image: 'Dockerfile'
+  env:
+    PROJECT_NAME: ${{ inputs.project-name }}
+    FUZZ_SECONDS: ${{ inputs.fuzz-seconds }}
diff --git a/infra/cifuzz/actions/entrypoint.py b/infra/cifuzz/actions/entrypoint.py
new file mode 100644
index 0000000..e680207
--- /dev/null
+++ b/infra/cifuzz/actions/entrypoint.py
@@ -0,0 +1,86 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Builds and runs specific OSS-Fuzz project's fuzzers for CI tools."""
+import logging
+import os
+import sys
+
+# pylint: disable=wrong-import-position
+sys.path.append(os.path.join(os.environ['OSS_FUZZ_ROOT'], 'infra', 'cifuzz'))
+import cifuzz
+
+# TODO: Turn default logging to INFO when CIFuzz is stable
+logging.basicConfig(
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+    level=logging.DEBUG)
+
+
+def main():
+  """Runs OSS-Fuzz project's fuzzers for CI tools.
+  This script is used to kick off the Github Actions CI tool. It is the
+  entrypoint of the Dockerfile in this directory. This action can be added to
+  any OSS-Fuzz project's workflow that uses Github.
+
+  Required environment variables:
+    PROJECT_NAME: The name of OSS-Fuzz project.
+    FUZZ_SECONDS: The length of time in seconds that fuzzers are to be run.
+    GITHUB_REPOSITORY: The name of the Github repo that called this script.
+    GITHUB_SHA: The commit SHA that triggered this script.
+    GITHUB_REF: The pull request reference that triggered this script.
+    GITHUB_EVENT_NAME: The name of the hook event that triggered this script.
+
+  Returns:
+    0 on success or 1 on Failure.
+  """
+  oss_fuzz_project_name = os.environ.get('PROJECT_NAME')
+  fuzz_seconds = int(os.environ.get('FUZZ_SECONDS', 360))
+  github_repo_name = os.path.basename(os.environ.get('GITHUB_REPOSITORY'))
+  pr_ref = os.environ.get('GITHUB_REF')
+  commit_sha = os.environ.get('GITHUB_SHA')
+  event = os.environ.get('GITHUB_EVENT_NAME')
+
+  # Get the shared volume directory and create required directories.
+  workspace = os.environ.get('GITHUB_WORKSPACE')
+  if not workspace:
+    logging.error('This script needs to be run in the Github action context.')
+    return 1
+
+  if event == 'push' and not cifuzz.build_fuzzers(
+      oss_fuzz_project_name, github_repo_name, workspace,
+      commit_sha=commit_sha):
+    logging.error('Error building fuzzers for project %s with commit %s.',
+                  oss_fuzz_project_name, commit_sha)
+    return 1
+  if event == 'pull_request' and not cifuzz.build_fuzzers(
+      oss_fuzz_project_name, github_repo_name, workspace, pr_ref=pr_ref):
+    logging.error('Error building fuzzers for project %s with pull request %s.',
+                  oss_fuzz_project_name, pr_ref)
+    return 1
+
+  # Run the specified project's fuzzers from the build.
+  run_status, bug_found = cifuzz.run_fuzzers(oss_fuzz_project_name,
+                                             fuzz_seconds, workspace)
+  if not run_status:
+    logging.error('Error occured while running fuzzers for project %s.',
+                  oss_fuzz_project_name)
+    return 1
+  if bug_found:
+    logging.info('Bug found.')
+    # Return 2 when a bug was found by a fuzzer causing the CI to fail.
+    return 2
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/infra/cifuzz/cifuzz.py b/infra/cifuzz/cifuzz.py
new file mode 100644
index 0000000..ce7586e
--- /dev/null
+++ b/infra/cifuzz/cifuzz.py
@@ -0,0 +1,169 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module used by CI tools in order to interact with fuzzers.
+This module helps CI tools do the following:
+  1. Build fuzzers.
+  2. Run fuzzers.
+Eventually it will be used to help CI tools determine which fuzzers to run.
+"""
+
+import logging
+import os
+import shutil
+import sys
+
+import fuzz_target
+
+# pylint: disable=wrong-import-position
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+import build_specified_commit
+import helper
+import repo_manager
+import utils
+
+# TODO: Turn default logging to WARNING when CIFuzz is stable
+logging.basicConfig(
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+    level=logging.DEBUG)
+
+
+def build_fuzzers(project_name,
+                  project_repo_name,
+                  workspace,
+                  pr_ref=None,
+                  commit_sha=None):
+  """Builds all of the fuzzers for a specific OSS-Fuzz project.
+
+  Args:
+    project_name: The name of the OSS-Fuzz project being built.
+    project_repo_name: The name of the projects repo.
+    workspace: The location in a shared volume to store a git repo and build
+      artifacts.
+    pr_ref: The pull request reference to be built.
+    commit_sha: The commit sha for the project to be built at.
+
+  Returns:
+    True if build succeeded or False on failure.
+  """
+  # Validate inputs.
+  assert pr_ref or commit_sha
+  if not os.path.exists(workspace):
+    logging.error('Invalid workspace: %s.', workspace)
+    return False
+
+  git_workspace = os.path.join(workspace, 'storage')
+  os.makedirs(git_workspace, exist_ok=True)
+  out_dir = os.path.join(workspace, 'out')
+  os.makedirs(out_dir, exist_ok=True)
+
+  # Detect repo information.
+  inferred_url, oss_fuzz_repo_path = build_specified_commit.detect_main_repo(
+      project_name, repo_name=project_repo_name)
+  if not inferred_url or not oss_fuzz_repo_path:
+    logging.error('Could not detect repo from project %s.', project_name)
+    return False
+  src_in_docker = os.path.dirname(oss_fuzz_repo_path)
+  oss_fuzz_repo_name = os.path.basename(oss_fuzz_repo_path)
+
+  # Checkout projects repo in the shared volume.
+  build_repo_manager = repo_manager.RepoManager(inferred_url,
+                                                git_workspace,
+                                                repo_name=oss_fuzz_repo_name)
+  try:
+    if pr_ref:
+      build_repo_manager.checkout_pr(pr_ref)
+    else:
+      build_repo_manager.checkout_commit(commit_sha)
+  except RuntimeError:
+    logging.error('Can not check out requested state.')
+    return False
+  except ValueError:
+    logging.error('Invalid commit SHA requested %s.', commit_sha)
+    return False
+
+  # Build Fuzzers using docker run.
+  command = [
+      '--cap-add', 'SYS_PTRACE', '-e', 'FUZZING_ENGINE=libfuzzer', '-e',
+      'SANITIZER=address', '-e', 'ARCHITECTURE=x86_64'
+  ]
+  container = utils.get_container_name()
+  if container:
+    command += ['-e', 'OUT=' + out_dir, '--volumes-from', container]
+    bash_command = 'rm -rf {0} && cp -r {1} {2} && compile'.format(
+        os.path.join(src_in_docker, oss_fuzz_repo_name, '*'),
+        os.path.join(git_workspace, oss_fuzz_repo_name), src_in_docker)
+  else:
+    command += [
+        '-e', 'OUT=' + '/out', '-v',
+        '%s:%s' % (os.path.join(git_workspace, oss_fuzz_repo_name),
+                   os.path.join(src_in_docker, oss_fuzz_repo_name)), '-v',
+        '%s:%s' % (out_dir, '/out')
+    ]
+    bash_command = 'compile'
+
+  command.extend([
+      'gcr.io/oss-fuzz/' + project_name,
+      '/bin/bash',
+      '-c',
+  ])
+  command.append(bash_command)
+  if helper.docker_run(command):
+    logging.error('Building fuzzers failed.')
+    return False
+  return True
+
+
+def run_fuzzers(project_name, fuzz_seconds, workspace):
+  """Runs all fuzzers for a specific OSS-Fuzz project.
+
+  Args:
+    project_name: The name of the OSS-Fuzz project being built.
+    fuzz_seconds: The total time allotted for fuzzing.
+    workspace: The location in a shared volume to store a git repo and build
+      artifacts.
+
+  Returns:
+    (True if run was successful, True if bug was found).
+  """
+  # Validate inputs.
+  if not os.path.exists(workspace):
+    logging.error('Invalid workspace: %s.', workspace)
+    return False, False
+  out_dir = os.path.join(workspace, 'out')
+  if not fuzz_seconds or fuzz_seconds < 1:
+    logging.error('Fuzz_seconds argument must be greater than 1, but was: %s.',
+                  format(fuzz_seconds))
+    return False, False
+
+  # Get fuzzer information.
+  fuzzer_paths = utils.get_fuzz_targets(out_dir)
+  if not fuzzer_paths:
+    logging.error('No fuzzers were found in out directory: %s.',
+                  format(out_dir))
+    return False, False
+  fuzz_seconds_per_target = fuzz_seconds // len(fuzzer_paths)
+
+  # Run each fuzzer for its share of the allotted time.
+  for fuzzer_path in fuzzer_paths:
+    target = fuzz_target.FuzzTarget(project_name, fuzzer_path,
+                                    fuzz_seconds_per_target, out_dir)
+    test_case, stack_trace = target.fuzz()
+    if not test_case or not stack_trace:
+      logging.info('Fuzzer %s, finished running.', target.target_name)
+    else:
+      logging.info('Fuzzer %s, detected error: %s.', target.target_name,
+                   stack_trace)
+      shutil.move(test_case, os.path.join(out_dir, 'testcase'))
+      return True, True
+  return True, False
diff --git a/infra/cifuzz/cifuzz_test.py b/infra/cifuzz/cifuzz_test.py
new file mode 100644
index 0000000..7c17b6f
--- /dev/null
+++ b/infra/cifuzz/cifuzz_test.py
@@ -0,0 +1,158 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test the functionality of the cifuzz module's functions:
+1. Building fuzzers.
+2. Running fuzzers.
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+# pylint: disable=wrong-import-position
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+import cifuzz
+
+# NOTE: This integration test relies on
+# https://github.com/google/oss-fuzz/tree/master/projects/example project
+EXAMPLE_PROJECT = 'example'
+
+
+class BuildFuzzersIntegrationTest(unittest.TestCase):
+  """Test the build_fuzzers function in the cifuzz module."""
+
+  def test_valid_commit(self):
+    """Test building fuzzers with valid inputs."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      out_path = os.path.join(tmp_dir, 'out')
+      os.mkdir(out_path)
+      self.assertTrue(
+          cifuzz.build_fuzzers(
+              EXAMPLE_PROJECT,
+              'oss-fuzz',
+              tmp_dir,
+              commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523'))
+      self.assertTrue(os.path.exists(os.path.join(out_path, 'do_stuff_fuzzer')))
+
+  def test_valid_pull_request(self):
+    """Test building fuzzers with valid pull request."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      out_path = os.path.join(tmp_dir, 'out')
+      os.mkdir(out_path)
+      self.assertTrue(
+          cifuzz.build_fuzzers(EXAMPLE_PROJECT,
+                               'oss-fuzz',
+                               tmp_dir,
+                               pr_ref='refs/pull/3310/merge'))
+      self.assertTrue(os.path.exists(os.path.join(out_path, 'do_stuff_fuzzer')))
+
+  def test_invalid_pull_request(self):
+    """Test building fuzzers with invalid pull request."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      out_path = os.path.join(tmp_dir, 'out')
+      os.mkdir(out_path)
+      self.assertFalse(
+          cifuzz.build_fuzzers(EXAMPLE_PROJECT,
+                               'oss-fuzz',
+                               tmp_dir,
+                               pr_ref='ref-1/merge'))
+
+  def test_invalid_project_name(self):
+    """Test building fuzzers with invalid project name."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      self.assertFalse(
+          cifuzz.build_fuzzers(
+              'not_a_valid_project',
+              'oss-fuzz',
+              tmp_dir,
+              commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523'))
+
+  def test_invalid_repo_name(self):
+    """Test building fuzzers with invalid repo name."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      self.assertFalse(
+          cifuzz.build_fuzzers(
+              EXAMPLE_PROJECT,
+              'not-real-repo',
+              tmp_dir,
+              commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523'))
+
+  def test_invalid_commit_sha(self):
+    """Test building fuzzers with invalid commit SHA."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      with self.assertRaises(AssertionError):
+        cifuzz.build_fuzzers(EXAMPLE_PROJECT,
+                             'oss-fuzz',
+                             tmp_dir,
+                             commit_sha='')
+
+  def test_invalid_workspace(self):
+    """Test building fuzzers with invalid workspace."""
+    self.assertFalse(
+        cifuzz.build_fuzzers(
+            EXAMPLE_PROJECT,
+            'oss-fuzz',
+            'not/a/dir',
+            commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523',
+        ))
+
+
+class RunFuzzersIntegrationTest(unittest.TestCase):
+  """Test the run_fuzzers function in the cifuzz module."""
+
+  def test_valid(self):
+    """Test run_fuzzers with a valid build."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      out_path = os.path.join(tmp_dir, 'out')
+      os.mkdir(out_path)
+      self.assertTrue(
+          cifuzz.build_fuzzers(
+              EXAMPLE_PROJECT,
+              'oss-fuzz',
+              tmp_dir,
+              commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523'))
+      self.assertTrue(os.path.exists(os.path.join(out_path, 'do_stuff_fuzzer')))
+      run_success, bug_found = cifuzz.run_fuzzers(EXAMPLE_PROJECT, 5, tmp_dir)
+    self.assertTrue(run_success)
+    self.assertTrue(bug_found)
+
+  def test_invlid_build(self):
+    """Test run_fuzzers with an invalid build."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      out_path = os.path.join(tmp_dir, 'out')
+      os.mkdir(out_path)
+      run_success, bug_found = cifuzz.run_fuzzers(EXAMPLE_PROJECT, 5, tmp_dir)
+    self.assertFalse(run_success)
+    self.assertFalse(bug_found)
+
+  def test_invalid_fuzz_seconds(self):
+    """Tests run_fuzzers with an invalid fuzz seconds."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      out_path = os.path.join(tmp_dir, 'out')
+      os.mkdir(out_path)
+      run_success, bug_found = cifuzz.run_fuzzers(EXAMPLE_PROJECT, 0, tmp_dir)
+    self.assertFalse(run_success)
+    self.assertFalse(bug_found)
+
+  def test_invalid_out_dir(self):
+    """Tests run_fuzzers with an invalid out directory."""
+    run_success, bug_found = cifuzz.run_fuzzers(EXAMPLE_PROJECT, 5,
+                                                'not/a/valid/path')
+    self.assertFalse(run_success)
+    self.assertFalse(bug_found)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/infra/cifuzz/fuzz_target.py b/infra/cifuzz/fuzz_target.py
new file mode 100644
index 0000000..9272bd2
--- /dev/null
+++ b/infra/cifuzz/fuzz_target.py
@@ -0,0 +1,108 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A module to handle running a fuzz target for a specified amount of time."""
+import logging
+import os
+import re
+import subprocess
+import sys
+
+# pylint: disable=wrong-import-position
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+import utils
+
+# TODO: Turn default logging to WARNING when CIFuzz is stable
+logging.basicConfig(
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+    level=logging.DEBUG)
+
+
+class FuzzTarget:
+  """A class to manage a single fuzz target.
+
+  Attributes:
+    project_name: The name of the OSS-Fuzz project the target is associated with.
+    target_name: The name of the fuzz target.
+    duration: The length of time in seconds that the target should run.
+    target_path: The location of the fuzz target binary.
+  """
+
+  def __init__(self, project_name, target_path, duration, out_dir):
+    """Represents a single fuzz target.
+
+    Args:
+      project_name: The OSS-Fuzz project of this target.
+      target_path: The location of the fuzz target binary.
+      duration: The length of time in seconds the target should run.
+      out_dir: The location of where the output from crashes should be stored.
+    """
+    self.target_name = os.path.basename(target_path)
+    self.duration = duration
+    self.project_name = project_name
+    self.target_path = target_path
+    self.out_dir = out_dir
+
+  def fuzz(self):
+    """Starts the fuzz target run for the length of time specified by duration.
+
+    Returns:
+      (test_case, stack trace) if found or (None, None) on timeout or error.
+    """
+    logging.info('Fuzzer %s, started.', self.target_name)
+    docker_container = utils.get_container_name()
+    command = ['docker', 'run', '--rm', '--privileged']
+    if docker_container:
+      command += [
+          '--volumes-from', docker_container, '-e', 'OUT=' + self.out_dir
+      ]
+    else:
+      command += ['-v', '%s:%s' % (self.out_dir, '/out')]
+
+    command += [
+        '-e', 'FUZZING_ENGINE=libfuzzer', '-e', 'SANITIZER=address', '-e',
+        'RUN_FUZZER_MODE=interactive', 'gcr.io/oss-fuzz-base/base-runner',
+        'bash', '-c', 'run_fuzzer {0}'.format(self.target_name)
+    ]
+    logging.info('Running command: %s', ' '.join(command))
+    process = subprocess.Popen(command,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+
+    try:
+      _, err = process.communicate(timeout=self.duration)
+    except subprocess.TimeoutExpired:
+      logging.info('Fuzzer %s, finished with timeout.', self.target_name)
+      return None, None
+
+    logging.info('Fuzzer %s, ended before timeout.', self.target_name)
+    err_str = err.decode('ascii')
+    test_case = self.get_test_case(err_str)
+    if not test_case:
+      logging.error('No test case found in stack trace.')
+      return None, None
+    return test_case, err_str
+
+  def get_test_case(self, error_string):
+    """Gets the file from a fuzzer run stack trace.
+
+    Args:
+      error_string: The stack trace string containing the error.
+
+    Returns:
+      The error test case or None if not found.
+    """
+    match = re.search(r'\bTest unit written to \.\/([^\s]+)', error_string)
+    if match:
+      return os.path.join(self.out_dir, match.group(1))
+    return None
diff --git a/infra/dev-requirements.txt b/infra/dev-requirements.txt
new file mode 100644
index 0000000..55e3b50
--- /dev/null
+++ b/infra/dev-requirements.txt
@@ -0,0 +1,5 @@
+# Requirements for submitting code changes to infra/ (needed by presubmit.py).
+pylint==2.4.4
+yapf==0.28.0
+PyYAML==5.1
+
diff --git a/infra/gcb/build_and_run_coverage.py b/infra/gcb/build_and_run_coverage.py
index 7c6ea5b..b94fe35 100644
--- a/infra/gcb/build_and_run_coverage.py
+++ b/infra/gcb/build_and_run_coverage.py
@@ -11,41 +11,31 @@
 import sys
 import urlparse
 
+import build_lib
 import build_project
 
 SANITIZER = 'coverage'
 CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
 PLATFORM = 'linux'
 
-# Where corpus backups can be downloaded from.
-CORPUS_BACKUP_URL = ('/{project}-backup.clusterfuzz-external.appspot.com/'
-                     'corpus/libFuzzer/{fuzzer}/latest.zip')
-
-# Cloud Builder has a limit of 100 build steps and 100 arguments for each step.
-CORPUS_DOWNLOAD_BATCH_SIZE = 100
-
 COVERAGE_BUILD_TAG = 'coverage'
 
-# Needed for reading public target.list.* files.
-GCS_URL_BASENAME = 'https://storage.googleapis.com/'
-
 # Where code coverage reports need to be uploaded to.
 COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
 
 # Link to the code coverage report in HTML format.
-HTML_REPORT_URL_FORMAT = (
-    GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
-    '/{project}/reports/{date}/{platform}/index.html')
+HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
+                          '/{project}/reports/{date}/{platform}/index.html')
 
 # This is needed for ClusterFuzz to pick up the most recent reports data.
-LATEST_REPORT_INFO_URL = (
-    '/' + COVERAGE_BUCKET_NAME + '/latest_report_info/{project}.json')
+LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
+                          '/latest_report_info/{project}.json')
 
 # Link where to upload code coverage report files to.
 UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
 
-# TODO(#2817): Support code coverage for Go projects.
-GO_FUZZ_BUILD = 'go-fuzz-build -libfuzzer'
+# Languages from project.yaml that have code coverage support.
+LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'cpp']
 
 
 def skip_build(message):
@@ -58,8 +48,7 @@
 
 
 def usage():
-  sys.stderr.write(
-    "Usage: " + sys.argv[0] + " <project_dir>\n")
+  sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
   exit(1)
 
 
@@ -70,14 +59,13 @@
     skip_build('Project "%s" is disabled.' % project_name)
 
   build_script_path = os.path.join(project_dir, 'build.sh')
-  with open(build_script_path) as fh:
-    if GO_FUZZ_BUILD in fh.read():
-      skip_build('Project "%s" uses go-fuzz, coverage is not supported yet.' %
-                 project_name)
-
-  fuzz_targets = get_targets_list(project_name)
-  if not fuzz_targets:
-    skip_build('No fuzz targets found for project "%s".' % project_name)
+  if os.path.exists(build_script_path):
+    with open(build_script_path) as fh:
+      if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
+        skip_build(('Project "{project_name}" is written in "{language}", '
+                    'coverage is not supported yet.').format(
+                        project_name=project_name,
+                        language=project_yaml['language']))
 
   dockerfile_path = os.path.join(project_dir, 'Dockerfile')
   name = project_yaml['name']
@@ -87,7 +75,8 @@
   build_steps = [
       {
           'args': [
-              'clone', 'https://github.com/google/oss-fuzz.git',
+              'clone',
+              'https://github.com/google/oss-fuzz.git',
           ],
           'name': 'gcr.io/cloud-builders/git',
       },
@@ -102,8 +91,7 @@
           'dir': 'oss-fuzz/projects/' + name,
       },
       {
-          'name':
-              image,
+          'name': image,
           'args': [
               'bash', '-c',
               'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
@@ -120,170 +108,166 @@
   if not workdir:
     workdir = '/src'
 
+  failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
+                 'python infra/helper.py build_image {name}\n'
+                 'python infra/helper.py build_fuzzers --sanitizer coverage '
+                 '{name}\n' + '*' * 80).format(name=name)
+
   # Compilation step.
-  build_steps.append(
-      {
-          'name': image,
-          'env': env,
-          'args': [
-              'bash',
-              '-c',
-               # Remove /out to make sure there are non instrumented binaries.
-               # `cd /src && cd {workdir}` (where {workdir} is parsed from the
-               # Dockerfile). Container Builder overrides our workdir so we need
-               # to add this step to set it back.
-               'rm -r /out && cd /src && cd {1} && mkdir -p {0} && compile'.format(out, workdir),
-          ],
-      }
-  )
+  build_steps.append({
+      'name':
+          image,
+      'env':
+          env,
+      'args': [
+          'bash',
+          '-c',
+          # Remove /out to make sure there are non instrumented binaries.
+          # `cd /src && cd {workdir}` (where {workdir} is parsed from the
+          # Dockerfile). Container Builder overrides our workdir so we need
+          # to add this step to set it back.
+          ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
+           'compile || (echo "{failure_msg}" && false)'
+          ).format(workdir=workdir, out=out, failure_msg=failure_msg),
+      ],
+  })
 
-  # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
-  for i in xrange(0,  len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
-    download_corpus_args = []
-    for binary_name in fuzz_targets[i : i+CORPUS_DOWNLOAD_BATCH_SIZE]:
-      qualified_name = binary_name
-      qualified_name_prefix = '%s_' % project_name
-      if not binary_name.startswith(qualified_name_prefix):
-        qualified_name = qualified_name_prefix + binary_name
+  download_corpora_step = build_lib.download_corpora_step(project_name)
+  if not download_corpora_step:
+    skip_build("Skipping code coverage build for %s.\n" % project_name)
 
-      url = build_project.get_signed_url(
-          CORPUS_BACKUP_URL.format(
-              project=project_name, fuzzer=qualified_name),
-          method='GET')
+  build_steps.append(download_corpora_step)
 
-      corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
-      download_corpus_args.append('%s %s' % (corpus_archive_path, url))
-
-    # Download corpus.
-    build_steps.append(
-        {
-            'name': 'gcr.io/oss-fuzz-base/base-runner',
-            'entrypoint': 'download_corpus',
-            'args': download_corpus_args,
-            'volumes': [{'name': 'corpus', 'path': '/corpus'}],
-        }
-    )
+  failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
+                 'To reproduce, run:\n'
+                 'python infra/helper.py build_image {name}\n'
+                 'python infra/helper.py build_fuzzers --sanitizer coverage '
+                 '{name}\n'
+                 'python infra/helper.py coverage {name}\n' +
+                 '*' * 80).format(name=name)
 
   # Unpack the corpus and run coverage script.
-  build_steps.append(
-      {
-          'name': 'gcr.io/oss-fuzz-base/base-runner',
-          'env': env + [
+  build_steps.append({
+      'name':
+          'gcr.io/oss-fuzz-base/base-runner',
+      'env':
+          env + [
               'HTTP_PORT=',
-              'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip()
+              'COVERAGE_EXTRA_ARGS=%s' %
+              project_yaml['coverage_extra_args'].strip()
           ],
-          'args': [
-              'bash',
-              '-c',
-              'for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && coverage',
-          ],
-          'volumes': [{'name': 'corpus', 'path': '/corpus'}],
-      }
-  )
+      'args': [
+          'bash', '-c',
+          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
+           'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
+           'This usually means that corpus backup for a particular fuzz '
+           'target does not exist. If a fuzz target was added in the last '
+           '24 hours, please wait one more day. Otherwise, something is '
+           'wrong with the fuzz target or the infrastructure, and corpus '
+           'pruning task does not finish successfully." && exit 1'
+           '); done && coverage || (echo "' + failure_msg + '" && false)')
+      ],
+      'volumes': [{
+          'name': 'corpus',
+          'path': '/corpus'
+      }],
+  })
 
   # Upload the report.
-  upload_report_url = UPLOAD_URL_FORMAT.format(
-      project=project_name, type='reports', date=report_date)
-  build_steps.append(
-      {
-          'name': 'gcr.io/cloud-builders/gsutil',
-          'args': [
-              '-m', 'cp', '-r',
-              os.path.join(out, 'report'),
-              upload_report_url,
-          ],
-      }
-  )
+  upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
+                                               type='reports',
+                                               date=report_date)
+  build_steps.append({
+      'name':
+          'gcr.io/cloud-builders/gsutil',
+      'args': [
+          '-m',
+          'cp',
+          '-r',
+          os.path.join(out, 'report'),
+          upload_report_url,
+      ],
+  })
 
   # Upload the fuzzer stats.
-  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(
-      project=project_name, type='fuzzer_stats', date=report_date)
-  build_steps.append(
-      {
-          'name': 'gcr.io/cloud-builders/gsutil',
-          'args': [
-              '-m', 'cp', '-r',
-              os.path.join(out, 'fuzzer_stats'),
-              upload_fuzzer_stats_url,
-          ],
-      }
-  )
+  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
+                                                     type='fuzzer_stats',
+                                                     date=report_date)
+  build_steps.append({
+      'name':
+          'gcr.io/cloud-builders/gsutil',
+      'args': [
+          '-m',
+          'cp',
+          '-r',
+          os.path.join(out, 'fuzzer_stats'),
+          upload_fuzzer_stats_url,
+      ],
+  })
 
   # Upload the fuzzer logs.
-  build_steps.append(
-      {
-          'name': 'gcr.io/cloud-builders/gsutil',
-          'args': [
-              '-m', 'cp', '-r',
-              os.path.join(out, 'logs'),
-              UPLOAD_URL_FORMAT.format(
-                  project=project_name, type='logs', date=report_date),
-          ],
-      }
-  )
+  build_steps.append({
+      'name':
+          'gcr.io/cloud-builders/gsutil',
+      'args': [
+          '-m',
+          'cp',
+          '-r',
+          os.path.join(out, 'logs'),
+          UPLOAD_URL_FORMAT.format(project=project_name,
+                                   type='logs',
+                                   date=report_date),
+      ],
+  })
 
   # Upload srcmap.
-  srcmap_upload_url = UPLOAD_URL_FORMAT.format(
-      project=project_name, type='srcmap', date=report_date)
+  srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
+                                               type='srcmap',
+                                               date=report_date)
   srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
-  build_steps.append(
-      {
-          'name': 'gcr.io/cloud-builders/gsutil',
-          'args': [
-              'cp',
-              '/workspace/srcmap.json',
-              srcmap_upload_url,
-          ],
-      }
-  )
+  build_steps.append({
+      'name': 'gcr.io/cloud-builders/gsutil',
+      'args': [
+          'cp',
+          '/workspace/srcmap.json',
+          srcmap_upload_url,
+      ],
+  })
 
   # Update the latest report information file for ClusterFuzz.
-  latest_report_info_url = build_project.get_signed_url(
+  latest_report_info_url = build_lib.get_signed_url(
       LATEST_REPORT_INFO_URL.format(project=project_name),
       method='PUT',
       content_type='application/json')
-  latest_report_info_body = json.dumps(
-      {
-          'fuzzer_stats_dir': upload_fuzzer_stats_url,
-          'html_report_url': HTML_REPORT_URL_FORMAT.format(
-              project=project_name, date=report_date, platform=PLATFORM),
-          'report_date': report_date,
-          'report_summary_path': os.path.join(
-              upload_report_url, PLATFORM, 'summary.json'),
-      }
-  )
+  latest_report_info_body = json.dumps({
+      'fuzzer_stats_dir':
+          upload_fuzzer_stats_url,
+      'html_report_url':
+          HTML_REPORT_URL_FORMAT.format(project=project_name,
+                                        date=report_date,
+                                        platform=PLATFORM),
+      'report_date':
+          report_date,
+      'report_summary_path':
+          os.path.join(upload_report_url, PLATFORM, 'summary.json'),
+  })
 
-  build_steps.append(
-      {
-          'name': 'gcr.io/cloud-builders/curl',
-          'args': [
-              '-H', 'Content-Type: application/json',
-              '-X', 'PUT',
-              '-d', latest_report_info_body,
-              latest_report_info_url,
-          ],
-      }
-  )
+  build_steps.append({
+      'name':
+          'gcr.io/cloud-builders/curl',
+      'args': [
+          '-H',
+          'Content-Type: application/json',
+          '-X',
+          'PUT',
+          '-d',
+          latest_report_info_body,
+          latest_report_info_url,
+      ],
+  })
   return build_steps
 
 
-def get_targets_list(project_name):
-  # libFuzzer ASan is the default configuration, get list of targets from it.
-  url = build_project.get_targets_list_url(
-      build_project.ENGINE_INFO['libfuzzer'].upload_bucket,
-      project_name,
-      'address')
-
-  url = urlparse.urljoin(GCS_URL_BASENAME , url)
-  r = requests.get(url)
-  if not r.status_code == 200:
-    sys.stderr.write('Failed to get list of targets from "%s".\n' % url)
-    sys.stderr.write('Status code: %d \t\tText:\n%s\n' % (r.status_code, r.text))
-    return None
-
-  return r.text.split()
-
-
 def main():
   if len(sys.argv) != 2:
     usage()
diff --git a/infra/gcb/build_lib.py b/infra/gcb/build_lib.py
new file mode 100644
index 0000000..d350873
--- /dev/null
+++ b/infra/gcb/build_lib.py
@@ -0,0 +1,134 @@
+"""Utility module for Google Cloud Build scripts."""
+import base64
+import collections
+import os
+import requests
+import sys
+import time
+import urllib
+import urlparse
+
+from oauth2client.service_account import ServiceAccountCredentials
+
+BUILD_TIMEOUT = 12 * 60 * 60
+
+# Needed for reading public target.list.* files.
+GCS_URL_BASENAME = 'https://storage.googleapis.com/'
+
+GCS_UPLOAD_URL_FORMAT = '/{0}/{1}/{2}'
+
+# Where corpus backups can be downloaded from.
+CORPUS_BACKUP_URL = ('/{project}-backup.clusterfuzz-external.appspot.com/'
+                     'corpus/libFuzzer/{fuzzer}/latest.zip')
+
+# Cloud Builder has a limit of 100 build steps and 100 arguments for each step.
+CORPUS_DOWNLOAD_BATCH_SIZE = 100
+
+TARGETS_LIST_BASENAME = 'targets.list'
+
+EngineInfo = collections.namedtuple(
+    'EngineInfo',
+    ['upload_bucket', 'supported_sanitizers', 'supported_architectures'])
+
+ENGINE_INFO = {
+    'libfuzzer':
+        EngineInfo(upload_bucket='clusterfuzz-builds',
+                   supported_sanitizers=['address', 'memory', 'undefined'],
+                   supported_architectures=['x86_64', 'i386']),
+    'afl':
+        EngineInfo(upload_bucket='clusterfuzz-builds-afl',
+                   supported_sanitizers=['address'],
+                   supported_architectures=['x86_64']),
+    'honggfuzz':
+        EngineInfo(upload_bucket='clusterfuzz-builds-honggfuzz',
+                   supported_sanitizers=['address', 'memory', 'undefined'],
+                   supported_architectures=['x86_64']),
+    'dataflow':
+        EngineInfo(upload_bucket='clusterfuzz-builds-dataflow',
+                   supported_sanitizers=['dataflow'],
+                   supported_architectures=['x86_64']),
+    'none':
+        EngineInfo(upload_bucket='clusterfuzz-builds-no-engine',
+                   supported_sanitizers=['address'],
+                   supported_architectures=['x86_64']),
+}
+
+
+def get_targets_list_filename(sanitizer):
+  return TARGETS_LIST_BASENAME + '.' + sanitizer
+
+
+def get_targets_list_url(bucket, project, sanitizer):
+  filename = get_targets_list_filename(sanitizer)
+  url = GCS_UPLOAD_URL_FORMAT.format(bucket, project, filename)
+  return url
+
+
+def _get_targets_list(project_name):
+  # libFuzzer ASan is the default configuration, get list of targets from it.
+  url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
+                             project_name, 'address')
+
+  url = urlparse.urljoin(GCS_URL_BASENAME, url)
+  response = requests.get(url)
+  if not response.status_code == 200:
+    sys.stderr.write('Failed to get list of targets from "%s".\n' % url)
+    sys.stderr.write('Status code: %d \t\tText:\n%s\n' %
+                     (response.status_code, response.text))
+    return None
+
+  return response.text.split()
+
+
+def get_signed_url(path, method='PUT', content_type=''):
+  timestamp = int(time.time() + BUILD_TIMEOUT)
+  blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
+
+  creds = ServiceAccountCredentials.from_json_keyfile_name(
+      os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
+  client_id = creds.service_account_email
+  signature = base64.b64encode(creds.sign_blob(blob)[1])
+  values = {
+      'GoogleAccessId': client_id,
+      'Expires': timestamp,
+      'Signature': signature,
+  }
+
+  return ('https://storage.googleapis.com{0}?'.format(path) +
+          urllib.urlencode(values))
+
+
+def download_corpora_step(project_name):
+  """Returns a GCB step for downloading corpora backups for the given project.
+  """
+  fuzz_targets = _get_targets_list(project_name)
+  if not fuzz_targets:
+    sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
+    return None
+
+  # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
+  for i in range(0, len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
+    download_corpus_args = []
+    for binary_name in fuzz_targets[i:i + CORPUS_DOWNLOAD_BATCH_SIZE]:
+      qualified_name = binary_name
+      qualified_name_prefix = '%s_' % project_name
+      if not binary_name.startswith(qualified_name_prefix):
+        qualified_name = qualified_name_prefix + binary_name
+
+      url = get_signed_url(CORPUS_BACKUP_URL.format(project=project_name,
+                                                    fuzzer=qualified_name),
+                           method='GET')
+
+      corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
+      download_corpus_args.append('%s %s' % (corpus_archive_path, url))
+
+    step = {
+        'name': 'gcr.io/oss-fuzz-base/base-runner',
+        'entrypoint': 'download_corpus',
+        'args': download_corpus_args,
+        'volumes': [{
+            'name': 'corpus',
+            'path': '/corpus'
+        }],
+    }
+    return step
diff --git a/infra/gcb/build_project.py b/infra/gcb/build_project.py
index fcdb1a0..f45b099 100644
--- a/infra/gcb/build_project.py
+++ b/infra/gcb/build_project.py
@@ -4,22 +4,19 @@
 Usage: build_project.py <project_dir>
 """
 
-import base64
-import collections
+from __future__ import print_function
+
 import datetime
 import json
 import os
 import re
 import sys
-import time
-import urllib
 import yaml
 
 from oauth2client.client import GoogleCredentials
-from oauth2client.service_account import ServiceAccountCredentials
 from googleapiclient.discovery import build
 
-BUILD_TIMEOUT = 12 * 60 * 60
+import build_lib
 
 FUZZING_BUILD_TAG = 'fuzzing'
 
@@ -37,46 +34,10 @@
     'engine-none': ['FUZZING_ENGINE=none'],
 }
 
-EngineInfo = collections.namedtuple(
-    'EngineInfo',
-    ['upload_bucket', 'supported_sanitizers', 'supported_architectures'])
-
-ENGINE_INFO = {
-    'libfuzzer':
-        EngineInfo(
-            upload_bucket='clusterfuzz-builds',
-            supported_sanitizers=['address', 'memory', 'undefined'],
-            supported_architectures=['x86_64', 'i386']),
-    'afl':
-        EngineInfo(
-            upload_bucket='clusterfuzz-builds-afl',
-            supported_sanitizers=['address'],
-            supported_architectures=['x86_64']),
-    'honggfuzz':
-        EngineInfo(
-            upload_bucket='clusterfuzz-builds-honggfuzz',
-            supported_sanitizers=['address', 'memory', 'undefined'],
-            supported_architectures=['x86_64']),
-    'dataflow':
-        EngineInfo(
-            upload_bucket='clusterfuzz-builds-dataflow',
-            supported_sanitizers=['dataflow'],
-            supported_architectures=['x86_64']),
-    'none':
-        EngineInfo(
-            upload_bucket='clusterfuzz-builds-no-engine',
-            supported_sanitizers=['address'],
-            supported_architectures=['x86_64']),
-}
-
 DEFAULT_ARCHITECTURES = ['x86_64']
-DEFAULT_ENGINES = ['libfuzzer', 'afl']
+DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
 DEFAULT_SANITIZERS = ['address', 'undefined']
 
-TARGETS_LIST_BASENAME = 'targets.list'
-
-UPLOAD_URL_FORMAT = '/{0}/{1}/{2}'
-
 
 def usage():
   sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
@@ -97,29 +58,12 @@
     project_yaml.setdefault('run_tests', True)
     project_yaml.setdefault('coverage_extra_args', '')
     project_yaml.setdefault('labels', {})
+    project_yaml.setdefault('language', 'cpp')
     return project_yaml
 
 
-def get_signed_url(path, method='PUT', content_type=''):
-  timestamp = int(time.time() + BUILD_TIMEOUT)
-  blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
-
-  creds = ServiceAccountCredentials.from_json_keyfile_name(
-      os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
-  client_id = creds.service_account_email
-  signature = base64.b64encode(creds.sign_blob(blob)[1])
-  values = {
-      'GoogleAccessId': client_id,
-      'Expires': timestamp,
-      'Signature': signature,
-  }
-
-  return ('https://storage.googleapis.com{0}?'.format(path) +
-          urllib.urlencode(values))
-
-
 def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
-  fuzzing_engine_info = ENGINE_INFO[fuzzing_engine]
+  fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
   if architecture == 'i386' and sanitizer != 'address':
     return False
   return (sanitizer in fuzzing_engine_info.supported_sanitizers and
@@ -216,17 +160,18 @@
         stamped_name = '-'.join([name, sanitizer, ts])
         zip_file = stamped_name + '.zip'
         stamped_srcmap_file = stamped_name + '.srcmap.json'
-        bucket = ENGINE_INFO[fuzzing_engine].upload_bucket
+        bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
         if architecture != 'x86_64':
           bucket += '-' + architecture
-        upload_url = get_signed_url(
-            UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
-        srcmap_url = get_signed_url(
-            UPLOAD_URL_FORMAT.format(bucket, name, stamped_srcmap_file))
+        upload_url = build_lib.get_signed_url(
+            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
+        srcmap_url = build_lib.get_signed_url(
+            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
+                                                   stamped_srcmap_file))
 
-        targets_list_filename = get_targets_list_filename(sanitizer)
-        targets_list_url = get_signed_url(
-            get_targets_list_url(bucket, name, sanitizer))
+        targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
+        targets_list_url = build_lib.get_signed_url(
+            build_lib.get_targets_list_url(bucket, name, sanitizer))
 
         env.append('OUT=' + out)
         env.append('MSAN_LIBS_PATH=/workspace/msan')
@@ -236,6 +181,16 @@
         if not workdir:
           workdir = '/src'
 
+        failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
+                       'python infra/helper.py build_image {name}\n'
+                       'python infra/helper.py build_fuzzers --sanitizer '
+                       '{sanitizer} --engine {engine} --architecture '
+                       '{architecture} {name}\n' + '*' * 80).format(
+                           name=name,
+                           sanitizer=sanitizer,
+                           engine=fuzzing_engine,
+                           architecture=architecture)
+
         build_steps.append(
             # compile
             {
@@ -251,8 +206,9 @@
                     # `cd /src && cd {workdir}` (where {workdir} is parsed from
                     # the Dockerfile). Container Builder overrides our workdir
                     # so we need to add this step to set it back.
-                    'rm -r /out && cd /src && cd {1} && mkdir -p {0} && compile'
-                    .format(out, workdir),
+                    ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
+                     'compile || (echo "{failure_msg}" && false)'
+                    ).format(workdir=workdir, out=out, failure_msg=failure_msg),
                 ],
             })
 
@@ -271,12 +227,31 @@
           })
 
         if run_tests:
+          failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
+                         'To reproduce, run:\n'
+                         'python infra/helper.py build_image {name}\n'
+                         'python infra/helper.py build_fuzzers --sanitizer '
+                         '{sanitizer} --engine {engine} --architecture '
+                         '{architecture} {name}\n'
+                         'python infra/helper.py check_build --sanitizer '
+                         '{sanitizer} --engine {engine} --architecture '
+                         '{architecture} {name}\n' + '*' * 80).format(
+                             name=name,
+                             sanitizer=sanitizer,
+                             engine=fuzzing_engine,
+                             architecture=architecture)
+
           build_steps.append(
               # test binaries
               {
-                  'name': 'gcr.io/oss-fuzz-base/base-runner',
-                  'env': env,
-                  'args': ['bash', '-c', 'test_all'],
+                  'name':
+                      'gcr.io/oss-fuzz-base/base-runner',
+                  'env':
+                      env,
+                  'args': [
+                      'bash', '-c',
+                      'test_all || (echo "{0}" && false)'.format(failure_msg)
+                  ],
               })
 
         if project_yaml['labels']:
@@ -293,6 +268,13 @@
               ],
           })
 
+        if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
+          dataflow_steps = dataflow_post_build_steps(name, env)
+          if dataflow_steps:
+            build_steps.extend(dataflow_steps)
+          else:
+            sys.stderr.write('Skipping dataflow post build steps.\n')
+
         build_steps.extend([
             # generate targets list
             {
@@ -313,7 +295,8 @@
                     image,
                 'args': [
                     'bash', '-c',
-                    'cd {0} && zip -r {1} *'.format(out, zip_file)
+                    'cd {out} && zip -r {zip_file} *'.format(out=out,
+                                                             zip_file=zip_file)
                 ],
             },
             # upload srcmap
@@ -355,22 +338,35 @@
   return build_steps
 
 
+def dataflow_post_build_steps(project_name, env):
+  steps = []
+  download_corpora_step = build_lib.download_corpora_step(project_name)
+  if not download_corpora_step:
+    return None
+
+  steps = [download_corpora_step]
+  steps.append({
+      'name': 'gcr.io/oss-fuzz-base/base-runner',
+      'env': env,
+      'args': [
+          'bash', '-c',
+          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
+           'collect_dft || (echo "DFT collection failed." && false)')
+      ],
+      'volumes': [{
+          'name': 'corpus',
+          'path': '/corpus'
+      }],
+  })
+  return steps
+
+
 def get_logs_url(build_id):
   URL_FORMAT = ('https://console.developers.google.com/logs/viewer?'
                 'resource=build%2Fbuild_id%2F{0}&project=oss-fuzz')
   return URL_FORMAT.format(build_id)
 
 
-def get_targets_list_filename(sanitizer):
-  return TARGETS_LIST_BASENAME + '.' + sanitizer
-
-
-def get_targets_list_url(bucket, project, sanitizer):
-  filename = get_targets_list_filename(sanitizer)
-  url = UPLOAD_URL_FORMAT.format(bucket, project, filename)
-  return url
-
-
 def run_build(build_steps, project_name, tag):
   options = {}
   if 'GCB_OPTIONS' in os.environ:
@@ -378,7 +374,7 @@
 
   build_body = {
       'steps': build_steps,
-      'timeout': str(BUILD_TIMEOUT) + 's',
+      'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
       'options': options,
       'logsBucket': GCB_LOGS_BUCKET,
       'tags': [project_name + '-' + tag,],
@@ -386,12 +382,12 @@
 
   credentials = GoogleCredentials.get_application_default()
   cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
-  build_info = cloudbuild.projects().builds().create(
-      projectId='oss-fuzz', body=build_body).execute()
+  build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
+                                                     body=build_body).execute()
   build_id = build_info['metadata']['build']['id']
 
-  print >> sys.stderr, 'Logs:', get_logs_url(build_id)
-  print build_id
+  print('Logs:', get_logs_url(build_id), file=sys.stderr)
+  print(build_id)
 
 
 def main():
diff --git a/infra/gcb/cancel.py b/infra/gcb/cancel.py
index 331244f..8393a51 100755
--- a/infra/gcb/cancel.py
+++ b/infra/gcb/cancel.py
@@ -15,7 +15,6 @@
 import yaml
 
 from oauth2client.client import GoogleCredentials
-from oauth2client.service_account import ServiceAccountCredentials
 from googleapiclient.discovery import build
 
 
@@ -32,8 +31,9 @@
 
   credentials = GoogleCredentials.get_application_default()
   cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
-  print cloudbuild.projects().builds().cancel(
-      projectId='oss-fuzz', id=build_id, body={}).execute()
+  print cloudbuild.projects().builds().cancel(projectId='oss-fuzz',
+                                              id=build_id,
+                                              body={}).execute()
 
 
 if __name__ == '__main__':
diff --git a/infra/helper.py b/infra/helper.py
index f09c0c1..5a3880b 100755
--- a/infra/helper.py
+++ b/infra/helper.py
@@ -14,6 +14,8 @@
 # limitations under the License.
 #
 ################################################################################
+"""Helper script for OSS-Fuzz users. Can do common tasks like building
+projects/fuzzers, running them etc."""
 
 from __future__ import print_function
 from multiprocessing.dummy import Pool as ThreadPool
@@ -26,7 +28,6 @@
 import re
 import subprocess
 import sys
-import tempfile
 import templates
 
 OSSFUZZ_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -46,7 +47,7 @@
 MAX_PROJECT_NAME_LENGTH = 26
 
 if sys.version_info[0] >= 3:
-    raw_input = input
+  raw_input = input  # pylint: disable=invalid-name
 
 CORPUS_URL_FORMAT = (
     'gs://{project_name}-corpus.clusterfuzz-external.appspot.com/libFuzzer/'
@@ -56,7 +57,8 @@
     'libFuzzer/{fuzz_target}/')
 
 
-def main():
+def main():  # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
+  """Get subcommand from program arguments and do it."""
   os.chdir(OSSFUZZ_DIR)
   if not os.path.exists(BUILD_DIR):
     os.mkdir(BUILD_DIR)
@@ -68,12 +70,14 @@
       'generate', help='Generate files for new project.')
   generate_parser.add_argument('project_name')
 
-  build_image_parser = subparsers.add_parser(
-      'build_image', help='Build an image.')
+  build_image_parser = subparsers.add_parser('build_image',
+                                             help='Build an image.')
   build_image_parser.add_argument('project_name')
-  build_image_parser.add_argument('--pull', action='store_true',
+  build_image_parser.add_argument('--pull',
+                                  action='store_true',
                                   help='Pull latest base image.')
-  build_image_parser.add_argument('--no-pull', action='store_true',
+  build_image_parser.add_argument('--no-pull',
+                                  action='store_true',
                                   help='Do not pull latest base image.')
 
   build_fuzzers_parser = subparsers.add_parser(
@@ -83,12 +87,15 @@
   _add_sanitizer_args(build_fuzzers_parser)
   _add_environment_args(build_fuzzers_parser)
   build_fuzzers_parser.add_argument('project_name')
-  build_fuzzers_parser.add_argument('source_path', help='path of local source',
+  build_fuzzers_parser.add_argument('source_path',
+                                    help='path of local source',
                                     nargs='?')
-  build_fuzzers_parser.add_argument('--clean', dest='clean',
+  build_fuzzers_parser.add_argument('--clean',
+                                    dest='clean',
                                     action='store_true',
                                     help='clean existing artifacts.')
-  build_fuzzers_parser.add_argument('--no-clean', dest='clean',
+  build_fuzzers_parser.add_argument('--no-clean',
+                                    dest='clean',
                                     action='store_false',
                                     help='do not clean existing artifacts '
                                     '(default).')
@@ -98,12 +105,13 @@
       'check_build', help='Checks that fuzzers execute without errors.')
   _add_architecture_args(check_build_parser)
   _add_engine_args(check_build_parser, choices=['libfuzzer', 'afl', 'dataflow'])
-  _add_sanitizer_args(
-      check_build_parser, choices=['address', 'memory', 'undefined', 'dataflow'])
+  _add_sanitizer_args(check_build_parser,
+                      choices=['address', 'memory', 'undefined', 'dataflow'])
   _add_environment_args(check_build_parser)
   check_build_parser.add_argument('project_name', help='name of the project')
-  check_build_parser.add_argument(
-      'fuzzer_name', help='name of the fuzzer', nargs='?')
+  check_build_parser.add_argument('fuzzer_name',
+                                  help='name of the fuzzer',
+                                  nargs='?')
 
   run_fuzzer_parser = subparsers.add_parser(
       'run_fuzzer', help='Run a fuzzer in the emulated fuzzing environment.')
@@ -112,39 +120,51 @@
   _add_environment_args(run_fuzzer_parser)
   run_fuzzer_parser.add_argument('project_name', help='name of the project')
   run_fuzzer_parser.add_argument('fuzzer_name', help='name of the fuzzer')
-  run_fuzzer_parser.add_argument('fuzzer_args', help='arguments to pass to the fuzzer',
+  run_fuzzer_parser.add_argument('fuzzer_args',
+                                 help='arguments to pass to the fuzzer',
                                  nargs=argparse.REMAINDER)
 
   coverage_parser = subparsers.add_parser(
       'coverage', help='Generate code coverage report for the project.')
-  coverage_parser.add_argument('--no-corpus-download', action='store_true',
+  coverage_parser.add_argument('--no-corpus-download',
+                               action='store_true',
                                help='do not download corpus backup from '
                                'OSS-Fuzz; use corpus located in '
                                'build/corpus/<project>/<fuzz_target>/')
-  coverage_parser.add_argument('--port', default='8008', help='specify port for'
+  coverage_parser.add_argument('--port',
+                               default='8008',
+                               help='specify port for'
                                ' a local HTTP server rendering coverage report')
-  coverage_parser.add_argument('--fuzz-target', help='specify name of a fuzz '
+  coverage_parser.add_argument('--fuzz-target',
+                               help='specify name of a fuzz '
                                'target to be run for generating coverage '
                                'report')
-  coverage_parser.add_argument('--corpus-dir', help='specify location of corpus'
+  coverage_parser.add_argument('--corpus-dir',
+                               help='specify location of corpus'
                                ' to be used (requires --fuzz-target argument)')
   coverage_parser.add_argument('project_name', help='name of the project')
-  coverage_parser.add_argument('extra_args', help='additional arguments to '
-                               'pass to llvm-cov utility.', nargs='*')
+  coverage_parser.add_argument('extra_args',
+                               help='additional arguments to '
+                               'pass to llvm-cov utility.',
+                               nargs='*')
 
   download_corpora_parser = subparsers.add_parser(
       'download_corpora', help='Download all corpora for a project.')
-  download_corpora_parser.add_argument('--fuzz-target', help='specify name of a fuzz target')
-  download_corpora_parser.add_argument('project_name', help='name of the project')
+  download_corpora_parser.add_argument('--fuzz-target',
+                                       help='specify name of a fuzz target')
+  download_corpora_parser.add_argument('project_name',
+                                       help='name of the project')
 
-  reproduce_parser = subparsers.add_parser(
-      'reproduce', help='Reproduce a crash.')
-  reproduce_parser.add_argument('--valgrind', action='store_true',
+  reproduce_parser = subparsers.add_parser('reproduce',
+                                           help='Reproduce a crash.')
+  reproduce_parser.add_argument('--valgrind',
+                                action='store_true',
                                 help='run with valgrind')
   reproduce_parser.add_argument('project_name', help='name of the project')
   reproduce_parser.add_argument('fuzzer_name', help='name of the fuzzer')
   reproduce_parser.add_argument('testcase_path', help='path of local testcase')
-  reproduce_parser.add_argument('fuzzer_args', help='arguments to pass to the fuzzer',
+  reproduce_parser.add_argument('fuzzer_args',
+                                help='arguments to pass to the fuzzer',
                                 nargs=argparse.REMAINDER)
   _add_environment_args(reproduce_parser)
 
@@ -156,8 +176,7 @@
   _add_sanitizer_args(shell_parser)
   _add_environment_args(shell_parser)
 
-  pull_images_parser = subparsers.add_parser('pull_images',
-                                             help='Pull base images.')
+  subparsers.add_parser('pull_images', help='Pull base images.')
 
   args = parser.parse_args()
 
@@ -171,23 +190,23 @@
 
   if args.command == 'generate':
     return generate(args)
-  elif args.command == 'build_image':
+  if args.command == 'build_image':
     return build_image(args)
-  elif args.command == 'build_fuzzers':
+  if args.command == 'build_fuzzers':
     return build_fuzzers(args)
-  elif args.command == 'check_build':
+  if args.command == 'check_build':
     return check_build(args)
-  elif args.command == 'download_corpora':
+  if args.command == 'download_corpora':
     return download_corpora(args)
-  elif args.command == 'run_fuzzer':
+  if args.command == 'run_fuzzer':
     return run_fuzzer(args)
-  elif args.command == 'coverage':
+  if args.command == 'coverage':
     return coverage(args)
-  elif args.command == 'reproduce':
+  if args.command == 'reproduce':
     return reproduce(args)
-  elif args.command == 'shell':
+  if args.command == 'shell':
     return shell(args)
-  elif args.command == 'pull_images':
+  if args.command == 'pull_images':
     return pull_images(args)
 
   return 0
@@ -266,16 +285,16 @@
   parser.add_argument('--architecture', default='x86_64', choices=choices)
 
 
-def _add_engine_args(
-        parser,
-        choices=('libfuzzer', 'afl', 'honggfuzz', 'dataflow', 'none')):
+def _add_engine_args(parser,
+                     choices=('libfuzzer', 'afl', 'honggfuzz', 'dataflow',
+                              'none')):
   """Add common engine args."""
   parser.add_argument('--engine', default='libfuzzer', choices=choices)
 
 
-def _add_sanitizer_args(
-        parser,
-        choices=('address', 'memory', 'undefined', 'coverage', 'dataflow')):
+def _add_sanitizer_args(parser,
+                        choices=('address', 'memory', 'undefined', 'coverage',
+                                 'dataflow')):
   """Add common sanitizer args."""
   parser.add_argument(
       '--sanitizer',
@@ -286,7 +305,8 @@
 
 def _add_environment_args(parser):
   """Add common environment args."""
-  parser.add_argument('-e', action='append',
+  parser.add_argument('-e',
+                      action='append',
                       help="set environment variable e.g. VAR=value")
 
 
@@ -308,7 +328,9 @@
   if no_cache:
     build_args.append('--no-cache')
 
-  build_args += ['-t', 'gcr.io/%s/%s' % (image_project, image_name), dockerfile_dir]
+  build_args += [
+      '-t', 'gcr.io/%s/%s' % (image_project, image_name), dockerfile_dir
+  ]
 
   return docker_build(build_args, pull=pull)
 
@@ -318,13 +340,15 @@
   return sum([['-e', v] for v in env_list], [])
 
 
+WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+
+
 def _workdir_from_dockerfile(project_name):
   """Parse WORKDIR from the Dockerfile for the given project."""
-  WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
   dockerfile_path = get_dockerfile_path(project_name)
 
-  with open(dockerfile_path) as f:
-    lines = f.readlines()
+  with open(dockerfile_path) as file_handle:
+    lines = file_handle.readlines()
 
   for line in reversed(lines):  # reversed to get last WORKDIR.
     match = re.match(WORKDIR_REGEX, line)
@@ -342,7 +366,12 @@
 
 def docker_run(run_args, print_output=True):
   """Call `docker run`."""
-  command = ['docker', 'run', '--rm', '-i', '--privileged']
+  command = ['docker', 'run', '--rm', '--privileged']
+
+  # Support environments with a TTY.
+  if sys.stdin.isatty():
+    command.append('-i')
+
   command.extend(run_args)
 
   print('Running:', _get_command_string(command))
@@ -352,8 +381,8 @@
 
   try:
     subprocess.check_call(command, stdout=stdout, stderr=subprocess.STDOUT)
-  except subprocess.CalledProcessError as e:
-    return e.returncode
+  except subprocess.CalledProcessError as error:
+    return error.returncode
 
   return 0
 
@@ -376,7 +405,7 @@
   return True
 
 
-def docker_pull(image, pull=False):
+def docker_pull(image):
   """Call `docker pull`."""
   command = ['docker', 'pull', image]
   print('Running:', _get_command_string(command))
@@ -416,8 +445,16 @@
   return 1
 
 
-def build_fuzzers_impl(project_name, clean, engine, sanitizer, architecture,
-                       env_to_add, source_path, no_cache=False):
+def build_fuzzers_impl(  # pylint: disable=too-many-arguments
+    project_name,
+    clean,
+    engine,
+    sanitizer,
+    architecture,
+    env_to_add,
+    source_path,
+    no_cache=False,
+    mount_location=None):
   """Build fuzzers."""
   if not build_image_impl(project_name, no_cache=no_cache):
     return 1
@@ -428,11 +465,11 @@
 
     # Clean old and possibly conflicting artifacts in project's out directory.
     docker_run([
-        '-v', '%s:/out' % project_out_dir,
-        '-t', 'gcr.io/oss-fuzz/%s' % project_name,
-        '/bin/bash', '-c', 'rm -rf /out/*'
+        '-v',
+        '%s:/out' % project_out_dir, '-t',
+        'gcr.io/oss-fuzz/%s' % project_name, '/bin/bash', '-c', 'rm -rf /out/*'
     ])
-     
+
   else:
     print('Keeping existing build artifacts as-is (if any).')
   env = [
@@ -448,47 +485,50 @@
   # Copy instrumented libraries.
   if sanitizer == 'memory':
     docker_run([
-        '-v', '%s:/work' % project_work_dir,
-        'gcr.io/oss-fuzz-base/msan-builder',
-        'bash', '-c', 'cp -r /msan /work'])
+        '-v',
+        '%s:/work' % project_work_dir, 'gcr.io/oss-fuzz-base/msan-builder',
+        'bash', '-c', 'cp -r /msan /work'
+    ])
     env.append('MSAN_LIBS_PATH=' + '/work/msan')
 
-  command = (
-      ['docker', 'run', '--rm', '-i', '--cap-add', 'SYS_PTRACE'] +
-      _env_to_docker_args(env))
+  command = ['--cap-add', 'SYS_PTRACE'] + _env_to_docker_args(env)
   if source_path:
     workdir = _workdir_from_dockerfile(project_name)
     if workdir == '/src':
-      print('Cannot use local checkout with "WORKDIR /src".', file=sys.stderr)
+      print('Cannot use local checkout with "WORKDIR: /src".', file=sys.stderr)
       return 1
+    if not mount_location:
+      command += [
+          '-v',
+          '%s:%s' % (_get_absolute_path(source_path), workdir),
+      ]
+    else:
+      command += [
+          '-v',
+          '%s:%s' % (_get_absolute_path(source_path), mount_location),
+      ]
 
-    command += [
-        '-v',
-        '%s:%s' % (_get_absolute_path(source_path), workdir),
-    ]
   command += [
-      '-v', '%s:/out' % project_out_dir,
-      '-v', '%s:/work' % project_work_dir,
-      '-t', 'gcr.io/oss-fuzz/%s' % project_name
+      '-v',
+      '%s:/out' % project_out_dir, '-v',
+      '%s:/work' % project_work_dir, '-t',
+      'gcr.io/oss-fuzz/%s' % project_name
   ]
 
-  print('Running:', _get_command_string(command))
-
-  try:
-    subprocess.check_call(command)
-  except subprocess.CalledProcessError:
-    print('Fuzzers build failed.', file=sys.stderr)
-    return 1
+  result_code = docker_run(command)
+  if result_code:
+    print('Building fuzzers failed.', file=sys.stderr)
+    return result_code
 
   # Patch MSan builds to use instrumented shared libraries.
   if sanitizer == 'memory':
-    docker_run([
-        '-v', '%s:/out' % project_out_dir,
-        '-v', '%s:/work' % project_work_dir
-    ] + _env_to_docker_args(env) + [
-        'gcr.io/oss-fuzz-base/base-msan-builder',
-        'patch_build.py', '/out'
-    ])
+    docker_run(
+        [
+            '-v',
+            '%s:/out' % project_out_dir, '-v',
+            '%s:/work' % project_work_dir
+        ] + _env_to_docker_args(env) +
+        ['gcr.io/oss-fuzz-base/base-msan-builder', 'patch_build.py', '/out'])
 
   return 0
 
@@ -496,8 +536,8 @@
 def build_fuzzers(args):
   """Build fuzzers."""
   return build_fuzzers_impl(args.project_name, args.clean, args.engine,
-                            args.sanitizer, args.architecture,
-                            args.e, args.source_path)
+                            args.sanitizer, args.architecture, args.e,
+                            args.source_path)
 
 
 def check_build(args):
@@ -518,15 +558,13 @@
     env += args.e
 
   run_args = _env_to_docker_args(env) + [
-      '-v', '%s:/out' % _get_output_dir(args.project_name),
-      '-t', 'gcr.io/oss-fuzz-base/base-runner'
+      '-v',
+      '%s:/out' % _get_output_dir(args.project_name), '-t',
+      'gcr.io/oss-fuzz-base/base-runner'
   ]
 
   if args.fuzzer_name:
-    run_args += [
-        'bad_build_check',
-        os.path.join('/out', args.fuzzer_name)
-    ]
+    run_args += ['test_one', os.path.join('/out', args.fuzzer_name)]
   else:
     run_args.append('test_all')
 
@@ -564,14 +602,11 @@
 
   corpus_backup_url = CORPUS_BACKUP_URL_FORMAT.format(project_name=project_name,
                                                       fuzz_target=fuzz_target)
-  command = [
-      'gsutil',
-      'ls',
-      corpus_backup_url
-  ]
+  command = ['gsutil', 'ls', corpus_backup_url]
 
-  corpus_listing = subprocess.Popen(
-      command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  corpus_listing = subprocess.Popen(command,
+                                    stdout=subprocess.PIPE,
+                                    stderr=subprocess.PIPE)
   output, error = corpus_listing.communicate()
 
   # Some fuzz targets (e.g. new ones) may not have corpus yet, just skip those.
@@ -583,38 +618,17 @@
   if output:
     latest_backup_url = output.splitlines()[-1]
     archive_path = corpus_dir + '.zip'
-    command = [
-        'gsutil',
-        '-q',
-        'cp',
-        latest_backup_url,
-        archive_path
-    ]
+    command = ['gsutil', '-q', 'cp', latest_backup_url, archive_path]
     subprocess.check_call(command)
 
-    command = [
-        'unzip',
-        '-q',
-        '-o',
-        archive_path,
-        '-d',
-        corpus_dir
-    ]
+    command = ['unzip', '-q', '-o', archive_path, '-d', corpus_dir]
     subprocess.check_call(command)
     os.remove(archive_path)
   else:
     # Sync the working corpus copy if a minimized backup is not available.
     corpus_url = CORPUS_URL_FORMAT.format(project_name=project_name,
                                           fuzz_target=fuzz_target)
-    command = [
-        'gsutil',
-        '-m',
-        '-q',
-        'rsync',
-        '-R',
-        corpus_url,
-        corpus_dir
-    ]
+    command = ['gsutil', '-m', '-q', 'rsync', '-R', corpus_url, corpus_dir]
     subprocess.check_call(command)
 
 
@@ -627,9 +641,10 @@
     with open(os.devnull, 'w') as stdout:
       subprocess.check_call(['gsutil', '--version'], stdout=stdout)
   except OSError:
-    print('ERROR: gsutil not found. Please install it from '
-          'https://cloud.google.com/storage/docs/gsutil_install',
-          file=sys.stderr)
+    print(
+        'ERROR: gsutil not found. Please install it from '
+        'https://cloud.google.com/storage/docs/gsutil_install',
+        file=sys.stderr)
     return False
 
   if args.fuzz_target:
@@ -645,12 +660,14 @@
     try:
       _get_latest_corpus(args.project_name, fuzz_target, corpus_dir)
       return True
-    except Exception as e:
-      print('ERROR: corpus download for %s failed: %s' % (fuzz_target, str(e)),
+    except Exception as error:  # pylint:disable=broad-except
+      print('ERROR: corpus download for %s failed: %s' %
+            (fuzz_target, str(error)),
             file=sys.stderr)
       return False
 
-  print('Downloading corpora for %s project to %s' % (args.project_name, corpus_dir))
+  print('Downloading corpora for %s project to %s' %
+        (args.project_name, corpus_dir))
   thread_pool = ThreadPool(multiprocessing.cpu_count())
   return all(thread_pool.map(_download_for_single_target, fuzz_targets))
 
@@ -658,9 +675,10 @@
 def coverage(args):
   """Generate code coverage using clang source based code coverage."""
   if args.corpus_dir and not args.fuzz_target:
-    print('ERROR: --corpus-dir requires specifying a particular fuzz target '
-          'using --fuzz-target',
-          file=sys.stderr)
+    print(
+        'ERROR: --corpus-dir requires specifying a particular fuzz target '
+        'using --fuzz-target',
+        file=sys.stderr)
     return 1
 
   if not check_project_exists(args.project_name):
@@ -683,17 +701,20 @@
   if args.corpus_dir:
     if not os.path.exists(args.corpus_dir):
       print('ERROR: the path provided in --corpus-dir argument does not exist',
-          file=sys.stderr)
+            file=sys.stderr)
       return 1
     corpus_dir = os.path.realpath(args.corpus_dir)
-    run_args.extend(['-v', '%s:/corpus/%s' % (corpus_dir,  args.fuzz_target)])
+    run_args.extend(['-v', '%s:/corpus/%s' % (corpus_dir, args.fuzz_target)])
   else:
     run_args.extend(['-v', '%s:/corpus' % _get_corpus_dir(args.project_name)])
 
   run_args.extend([
-      '-v', '%s:/out' % _get_output_dir(args.project_name),
-      '-p', '%s:%s' % (args.port, args.port),
-      '-t', 'gcr.io/oss-fuzz-base/base-runner',
+      '-v',
+      '%s:/out' % _get_output_dir(args.project_name),
+      '-p',
+      '%s:%s' % (args.port, args.port),
+      '-t',
+      'gcr.io/oss-fuzz-base/base-runner',
   ])
 
   run_args.append('coverage')
@@ -727,8 +748,10 @@
     env += args.e
 
   run_args = _env_to_docker_args(env) + [
-      '-v', '%s:/out' % _get_output_dir(args.project_name),
-      '-t', 'gcr.io/oss-fuzz-base/base-runner',
+      '-v',
+      '%s:/out' % _get_output_dir(args.project_name),
+      '-t',
+      'gcr.io/oss-fuzz-base/base-runner',
       'run_fuzzer',
       args.fuzzer_name,
   ] + args.fuzzer_args
@@ -738,11 +761,13 @@
 
 def reproduce(args):
   """Reproduce a specific test case from a specific project."""
-  return reproduce_impl(args.project_name, args.fuzzer_name, args.valgrind, args.env_to_add,
-                        fuzzer_args, args.testcase_path)
+  return reproduce_impl(args.project_name, args.fuzzer_name, args.valgrind,
+                        args.e, args.fuzzer_args, args.testcase_path)
 
 
-def reproduce_impl(project_name, fuzzer_name, valgrind, env_to_add, fuzzer_args, testcase_path):
+def reproduce_impl(  # pylint: disable=too-many-arguments
+    project_name, fuzzer_name, valgrind, env_to_add, fuzzer_args,
+    testcase_path):
   """Reproduces a testcase in the container."""
   if not check_project_exists(project_name):
     return 1
@@ -765,9 +790,12 @@
     env += env_to_add
 
   run_args = _env_to_docker_args(env) + [
-      '-v', '%s:/out' % _get_output_dir(project_name),
-      '-v', '%s:/testcase' % _get_absolute_path(testcase_path),
-      '-t', 'gcr.io/oss-fuzz-base/%s' % image_name,
+      '-v',
+      '%s:/out' % _get_output_dir(project_name),
+      '-v',
+      '%s:/testcase' % _get_absolute_path(testcase_path),
+      '-t',
+      'gcr.io/oss-fuzz-base/%s' % image_name,
       'reproduce',
       fuzzer_name,
       '-runs=100',
@@ -780,38 +808,39 @@
   """Generate empty project files."""
   if len(args.project_name) > MAX_PROJECT_NAME_LENGTH:
     print('Project name needs to be less than or equal to %d characters.' %
-          MAX_PROJECT_NAME_LENGTH, file=sys.stderr)
+          MAX_PROJECT_NAME_LENGTH,
+          file=sys.stderr)
     return 1
 
   if not VALID_PROJECT_NAME_REGEX.match(args.project_name):
     print('Invalid project name.', file=sys.stderr)
     return 1
 
-  dir = os.path.join('projects', args.project_name)
+  directory = os.path.join('projects', args.project_name)
 
   try:
-    os.mkdir(dir)
-  except OSError as e:
-    if e.errno != errno.EEXIST:
+    os.mkdir(directory)
+  except OSError as error:
+    if error.errno != errno.EEXIST:
       raise
-    print(dir, 'already exists.', file=sys.stderr)
+    print(directory, 'already exists.', file=sys.stderr)
     return 1
 
-  print('Writing new files to', dir)
+  print('Writing new files to', directory)
 
   template_args = {
-    'project_name': args.project_name,
-    'year': datetime.datetime.now().year
+      'project_name': args.project_name,
+      'year': datetime.datetime.now().year
   }
-  with open(os.path.join(dir, 'project.yaml'), 'w') as f:
-    f.write(templates.PROJECT_YAML_TEMPLATE % template_args)
+  with open(os.path.join(directory, 'project.yaml'), 'w') as file_handle:
+    file_handle.write(templates.PROJECT_YAML_TEMPLATE % template_args)
 
-  with open(os.path.join(dir, 'Dockerfile'), 'w') as f:
-    f.write(templates.DOCKER_TEMPLATE % template_args)
+  with open(os.path.join(directory, 'Dockerfile'), 'w') as file_handle:
+    file_handle.write(templates.DOCKER_TEMPLATE % template_args)
 
-  build_sh_path = os.path.join(dir, 'build.sh')
-  with open(build_sh_path, 'w') as f:
-    f.write(templates.BUILD_TEMPLATE % template_args)
+  build_sh_path = os.path.join(directory, 'build.sh')
+  with open(build_sh_path, 'w') as file_handle:
+    file_handle.write(templates.BUILD_TEMPLATE % template_args)
 
   os.chmod(build_sh_path, 0o755)
   return 0
@@ -839,17 +868,17 @@
     out_dir = _get_output_dir(args.project_name)
 
   run_args = _env_to_docker_args(env) + [
-      '-v', '%s:/out' % out_dir,
-      '-v', '%s:/work' % _get_work_dir(args.project_name),
-      '-t', 'gcr.io/%s/%s' % (image_project, args.project_name),
-      '/bin/bash'
+      '-v',
+      '%s:/out' % out_dir, '-v',
+      '%s:/work' % _get_work_dir(args.project_name), '-t',
+      'gcr.io/%s/%s' % (image_project, args.project_name), '/bin/bash'
   ]
 
   docker_run(run_args)
   return 0
 
 
-def pull_images(args):
+def pull_images(_):
   """Pull base images."""
   for base_image in BASE_IMAGES:
     if not docker_pull(base_image):
diff --git a/infra/presubmit.py b/infra/presubmit.py
new file mode 100755
index 0000000..7be16a8
--- /dev/null
+++ b/infra/presubmit.py
@@ -0,0 +1,354 @@
+#!/usr/bin/env python3
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Check code for common issues before submitting."""
+
+import argparse
+import os
+import subprocess
+import sys
+import yaml
+
+_SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
+def _is_project_file(actual_path, expected_filename):
+  """Returns True if actual_path's name is |expected_filename| and is a file
+  that exists and is in projects/."""
+  if os.path.basename(actual_path) != expected_filename:
+    return False
+
+  if os.path.basename(os.path.dirname(
+      os.path.dirname(actual_path))) != 'projects':
+    return False
+
+  return os.path.exists(actual_path)
+
+
+# TODO: Check for -fsanitize=fuzzer in files as well.
+
+
+def _check_one_lib_fuzzing_engine(build_sh_file):
+  """Returns False if |build_sh_file| contains -lFuzzingEngine.
+  This is deprecated behavior. $LIB_FUZZING_ENGINE should be used instead
+  so that -fsanitize=fuzzer is used."""
+  if not _is_project_file(build_sh_file, 'build.sh'):
+    return True
+
+  with open(build_sh_file) as build_sh:
+    build_sh_lines = build_sh.readlines()
+  for line_num, line in enumerate(build_sh_lines):
+    uncommented_code = line.split('#')[0]
+    if '-lFuzzingEngine' in uncommented_code:
+      print(
+          'Error: build.sh contains deprecated "-lFuzzingEngine" on line: {0}. '
+          'Please use "$LIB_FUZZING_ENGINE" instead.'.format(line_num))
+      return False
+  return True
+
+
+def check_lib_fuzzing_engine(paths):
+  """Call _check_one_lib_fuzzing_engine on each path in |paths|. Return True if
+  the result of every call is True."""
+  return all([_check_one_lib_fuzzing_engine(path) for path in paths])
+
+
+class ProjectYamlChecker:
+  """Checks for a project.yaml file."""
+
+  # Sections in a project.yaml and the constant values that they are allowed
+  # to have.
+  SECTIONS_AND_CONSTANTS = {
+      'sanitizers': {'address', 'none', 'memory', 'undefined', 'dataflow'},
+      'architectures': {'i386', 'x86_64'},
+      'fuzzing_engines': {'afl', 'libfuzzer', 'honggfuzz', 'dataflow'},
+  }
+
+  # Note: this list must be updated when we allow new sections.
+  VALID_SECTION_NAMES = [
+      'architectures',
+      'auto_ccs',
+      'coverage_extra_args',
+      'disabled',
+      'fuzzing_engines',
+      'homepage',
+      'primary_contact',
+      'sanitizers',
+      'vendor_ccs',
+      'view_restrictions',
+      'language',
+  ]
+
+  LANGUAGES_SUPPORTED = ['c', 'cpp', 'go', 'rust', 'python']
+
+  # Note that some projects like boost only have auto-ccs. However, forgetting
+  # primary contact is probably a mistake.
+  REQUIRED_SECTIONS = ['primary_contact']
+
+  def __init__(self, filename):
+    self.filename = filename
+    with open(filename) as file_handle:
+      self.data = yaml.safe_load(file_handle)
+
+    self.success = True
+
+  def do_checks(self):
+    """Do all project.yaml checks. Return True if they pass."""
+    if self.is_disabled():
+      return True
+
+    checks = [
+        self.check_project_yaml_constants,
+        self.check_required_sections,
+        self.check_valid_section_names,
+        self.check_valid_emails,
+        self.check_valid_language,
+    ]
+    for check_function in checks:
+      check_function()
+    return self.success
+
+  def is_disabled(self):
+    """Is this project disabled."""
+    return self.data.get('disabled', False)
+
+  def error(self, message):
+    """Print an error message and set self.success to False."""
+    self.success = False
+    print('Error in {filename}: {message}'.format(filename=self.filename,
+                                                  message=message))
+
+  def check_project_yaml_constants(self):
+    """Check that certain sections only have certain constant values."""
+    for section, allowed_constants in self.SECTIONS_AND_CONSTANTS.items():
+      if section not in self.data:
+        continue
+      actual_constants = self.data[section]
+      for constant in actual_constants:
+        if isinstance(constant, str):
+          if constant not in allowed_constants:
+            self.error(('{constant} (in {section} section) is not a valid '
+                        'constant ({allowed_constants}).').format(
+                            constant=constant,
+                            section=section,
+                            allowed_constants=', '.join(allowed_constants)))
+        elif isinstance(constant, dict):
+          # The only alternative value allowed is the experimental flag, i.e.
+          # `constant == {'memory': {'experimental': True}}`. Do not check the
+          # experimental flag, but assert that the sanitizer is a valid one.
+          if (len(constant.keys()) > 1 or
+              list(constant.keys())[0] not in allowed_constants):
+            self.error('Not allowed value in the project.yaml: ' +
+                       str(constant))
+        else:
+          self.error('Not allowed value in the project.yaml: ' + str(constant))
+
+  def check_valid_section_names(self):
+    """Check that only valid sections are included."""
+    for name in self.data:
+      if name not in self.VALID_SECTION_NAMES:
+        self.error('{name} is not a valid section name ({valid_names})'.format(
+            name=name, valid_names=self.VALID_SECTION_NAMES))
+
+  def check_required_sections(self):
+    """Check that all required sections are present."""
+    for section in self.REQUIRED_SECTIONS:
+      if section not in self.data:
+        self.error(section + ' section is missing.')
+
+  def check_valid_emails(self):
+    """Check that emails are valid looking."""
+    # Get email addresses.
+    email_addresses = []
+    primary_contact = self.data.get('primary_contact')
+    if primary_contact:
+      email_addresses.append(primary_contact)
+    auto_ccs = self.data.get('auto_ccs')
+    if auto_ccs:
+      email_addresses.extend(auto_ccs)
+
+    # Sanity check them.
+    for email_address in email_addresses:
+      if '@' not in email_address or '.' not in email_address:
+        self.error(email_address + ' is an invalid email address.')
+
+  def check_valid_language(self):
+    """Check that the language specified is valid."""
+    language = self.data.get('language')
+    if not language:
+      return
+
+    if language not in self.LANGUAGES_SUPPORTED:
+      self.error('{language} is not supported ({supported}).'.format(
+          language=language, supported=self.LANGUAGES_SUPPORTED))
+
+
+def _check_one_project_yaml(project_yaml_filename):
+  """Do checks on the project.yaml file."""
+  if not _is_project_file(project_yaml_filename, 'project.yaml'):
+    return True
+
+  checker = ProjectYamlChecker(project_yaml_filename)
+  return checker.do_checks()
+
+
+def check_project_yaml(paths):
+  """Call _check_one_project_yaml on each path in |paths|. Return True if
+  the result of every call is True."""
+  return all([_check_one_project_yaml(path) for path in paths])
+
+
+def do_checks(changed_files):
+  """Run all presubmit checks return False if any fails."""
+  checks = [
+      check_license, yapf, lint, check_project_yaml, check_lib_fuzzing_engine
+  ]
+  # Use a list comprehension here and in other cases where we use all() so that
+  # we don't quit early on failure. This is more user-friendly since the more
+  # errors we spit out at once, the fewer check-fix-check cycles developers
+  # need to do.
+  return all([check(changed_files) for check in checks])
+
+
+_CHECK_LICENSE_FILENAMES = ['Dockerfile']
+_CHECK_LICENSE_EXTENSIONS = [
+    '.bash',
+    '.c',
+    '.cc',
+    '.cpp',
+    '.css',
+    '.h',
+    '.htm',
+    '.html',
+    '.js',
+    '.proto',
+    '.py',
+    '.sh',
+]
+
+_LICENSE_STRING = 'http://www.apache.org/licenses/LICENSE-2.0'
+
+
+def check_license(paths):
+  """Validate license header."""
+  if not paths:
+    return True
+
+  success = True
+  for path in paths:
+    filename = os.path.basename(path)
+    extension = os.path.splitext(path)[1]
+    if (filename not in _CHECK_LICENSE_FILENAMES and
+        extension not in _CHECK_LICENSE_EXTENSIONS):
+      continue
+
+    with open(path) as file_handle:
+      if _LICENSE_STRING not in file_handle.read():
+        print('Missing license header in file %s.' % str(path))
+        success = False
+
+  return success
+
+
+def bool_to_returncode(success):
+  """Return 0 if |success|. Otherwise return 1."""
+  if success:
+    print('Success.')
+    return 0
+
+  print('Failed.')
+  return 1
+
+
+def is_python(path):
+  """Returns True if |path| ends in .py."""
+  return os.path.splitext(path)[1] == '.py'
+
+
+def lint(paths):
+  """Run python's linter on |paths| if it is a python file. Return False if it
+  fails linting."""
+  paths = [path for path in paths if is_python(path)]
+  if not paths:
+    return True
+
+  command = ['python3', '-m', 'pylint', '-j', '0']
+  command.extend(paths)
+
+  returncode = subprocess.run(command, check=False).returncode
+  return returncode == 0
+
+
+def yapf(paths, validate=True):
+  """Do yapf on |path| if it is Python file. Only validates format if
+  |validate| otherwise, formats the file. Returns False if validation
+  or formatting fails."""
+  paths = [path for path in paths if is_python(path)]
+  if not paths:
+    return True
+
+  validate_argument = '-d' if validate else '-i'
+  command = ['yapf', validate_argument, '-p']
+  command.extend(paths)
+
+  returncode = subprocess.run(command, check=False).returncode
+  return returncode == 0
+
+
+def get_changed_files():
+  """Return a list of absolute paths of files changed in this git branch."""
+  # FIXME: This doesn't work if branch is behind master.
+  diff_command = ['git', 'diff', '--name-only', 'FETCH_HEAD']
+  return [
+      os.path.abspath(path)
+      for path in subprocess.check_output(diff_command).decode().splitlines()
+      if os.path.isfile(path)
+  ]
+
+
+def main():
+  """Check changes on a branch for common issues before submitting."""
+  # Get program arguments.
+  parser = argparse.ArgumentParser(description='Presubmit script for oss-fuzz.')
+  parser.add_argument('command',
+                      choices=['format', 'lint', 'license'],
+                      nargs='?')
+  args = parser.parse_args()
+
+  changed_files = get_changed_files()
+
+  os.chdir(_SRC_ROOT)
+
+  # Do one specific check if the user asked for it.
+  if args.command == 'format':
+    success = yapf(changed_files, False)
+    return bool_to_returncode(success)
+
+  if args.command == 'lint':
+    success = lint(changed_files)
+    return bool_to_returncode(success)
+
+  if args.command == 'license':
+    success = check_license(changed_files)
+    return bool_to_returncode(success)
+
+  # Otherwise, do all of them.
+  success = do_checks(changed_files)
+  return bool_to_returncode(success)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/infra/repo_manager.py b/infra/repo_manager.py
index 9a93df9..cf98ab9 100644
--- a/infra/repo_manager.py
+++ b/infra/repo_manager.py
@@ -23,34 +23,34 @@
 """
 import os
 import shutil
-import subprocess
+
+import utils
 
 
-class RepoManagerError(Exception):
-  """Class to describe the exceptions in RepoManager."""
-
-
-class RepoManager(object):
+class RepoManager:
   """Class to manage git repos from python.
 
   Attributes:
-    repo_url: The location of the git repo
-    base_dir: The location of where the repo clone is stored locally
-    repo_name: The name of the github project
-    repo_dir: The location of the main repo
+    repo_url: The location of the git repo.
+    base_dir: The location of where the repo clone is stored locally.
+    repo_name: The name of the GitHub project.
+    repo_dir: The location of the main repo.
   """
 
-  def __init__(self, repo_url, base_dir):
+  def __init__(self, repo_url, base_dir, repo_name=None):
     """Constructs a repo manager class.
 
     Args:
-      repo_url: The github url needed to clone
-      base_dir: The full filepath where the git repo is located
+      repo_url: The github url needed to clone.
+      base_dir: The full file-path where the git repo is located.
+      repo_name: The name of the directory the repo is cloned to.
     """
-
     self.repo_url = repo_url
     self.base_dir = base_dir
-    self.repo_name = self.repo_url.split('/')[-1].strip('.git')
+    if repo_name:
+      self.repo_name = repo_name
+    else:
+      self.repo_name = os.path.basename(self.repo_url).strip('.git')
     self.repo_dir = os.path.join(self.base_dir, self.repo_name)
     self._clone()
 
@@ -58,46 +58,21 @@
     """Creates a clone of the repo in the specified directory.
 
       Raises:
-        RepoManagerError if the repo was not able to be cloned
+        ValueError: when the repo is not able to be cloned.
     """
     if not os.path.exists(self.base_dir):
       os.makedirs(self.base_dir)
     self.remove_repo()
-    self._run_command(['git', 'clone', self.repo_url],
-                      self.base_dir,
-                      check_result=True)
+    out, err = utils.execute(['git', 'clone', self.repo_url, self.repo_name],
+                             location=self.base_dir)
     if not self._is_git_repo():
-      raise RepoManagerError('%s is not a git repo' % self.repo_url)
-
-  def _run_command(self, command, location='.', check_result=False):
-    """ Runs a shell command in the specified directory location.
-
-    Args:
-      command: The command as a list to be run
-      location: The directory the command is run in
-      check_result: Should an exception be thrown on failed command
-
-    Returns:
-      The stdout of the command, the error code
-
-    Raises:
-      RepoManagerError: running a command resulted in an error
-    """
-    process = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=location)
-    out, err = process.communicate()
-    if check_result and (process.returncode or err):
-      raise RepoManagerError(
-          'Error: %s running command: %s with return code: %s' %
-          (err, command, process.returncode))
-    if out is not None:
-      out = out.decode('ascii')
-    return out, process.returncode
+      raise ValueError('%s is not a git repo' % self.repo_url)
 
   def _is_git_repo(self):
     """Test if the current repo dir is a git repo or not.
 
     Returns:
-      True if the current repo_dir is a valid git repo
+      True if the current repo_dir is a valid git repo.
     """
     git_path = os.path.join(self.repo_dir, '.git')
     return os.path.isdir(git_path)
@@ -106,93 +81,103 @@
     """Checks to see if a commit exists in the project repo.
 
     Args:
-      commit: The commit SHA you are checking
+      commit: The commit SHA you are checking.
 
     Returns:
-      True if the commit exits in the project
-
-    Raises:
-      ValueException: an empty string was passed in as a commit
+      True if the commit exits in the project.
     """
-
-    # Handle the exception case, if empty string is passed _run_command will
-    # raise a ValueError
     if not commit.rstrip():
-      raise ValueError('An empty string is not a valid commit SHA')
+      return False
 
-    _, err_code = self._run_command(['git', 'cat-file', '-e', commit],
-                                    self.repo_dir)
+    _, err_code = utils.execute(['git', 'cat-file', '-e', commit],
+                                self.repo_dir)
     return not err_code
 
   def get_current_commit(self):
     """Gets the current commit SHA of the repo.
 
     Returns:
-      The current active commit SHA
+      The current active commit SHA.
     """
-    out, _ = self._run_command(['git', 'rev-parse', 'HEAD'],
-                               self.repo_dir,
-                               check_result=True)
+    out, _ = utils.execute(['git', 'rev-parse', 'HEAD'],
+                           self.repo_dir,
+                           check_result=True)
     return out.strip('\n')
 
   def get_commit_list(self, old_commit, new_commit):
     """Gets the list of commits(inclusive) between the old and new commits.
 
     Args:
-      old_commit: The oldest commit to be in the list
-      new_commit: The newest commit to be in the list
+      old_commit: The oldest commit to be in the list.
+      new_commit: The newest commit to be in the list.
 
     Returns:
-      The list of commit SHAs from newest to oldest
+      The list of commit SHAs from newest to oldest.
 
     Raises:
-      RepoManagerError when commits dont exist
+      ValueError: When either the old or new commit does not exist.
+      RuntimeError: When there is an error getting the commit list.
     """
 
     if not self.commit_exists(old_commit):
-      raise RepoManagerError('The old commit %s does not exist' % old_commit)
+      raise ValueError('The old commit %s does not exist' % old_commit)
     if not self.commit_exists(new_commit):
-      raise RepoManagerError('The new commit %s does not exist' % new_commit)
+      raise ValueError('The new commit %s does not exist' % new_commit)
     if old_commit == new_commit:
       return [old_commit]
-    out, err_code = self._run_command(
+    out, err_code = utils.execute(
         ['git', 'rev-list', old_commit + '..' + new_commit], self.repo_dir)
     commits = out.split('\n')
     commits = [commit for commit in commits if commit]
     if err_code or not commits:
-      raise RepoManagerError('Error getting commit list between %s and %s ' %
-                             (old_commit, new_commit))
+      raise RuntimeError('Error getting commit list between %s and %s ' %
+                         (old_commit, new_commit))
 
     # Make sure result is inclusive
     commits.append(old_commit)
     return commits
 
+  def fetch_unshallow(self):
+    """Gets the current git repository history."""
+    git_path = os.path.join(self.repo_dir, '.git', 'shallow')
+    if os.path.exists(git_path):
+      utils.execute(['git', 'fetch', '--unshallow'],
+                    self.repo_dir,
+                    check_result=True)
+
+  def checkout_pr(self, pr_ref):
+    """Checks out a remote pull request.
+
+    Args:
+      pr_ref: The pull request reference to be checked out.
+    """
+    self.fetch_unshallow()
+    utils.execute(['git', 'fetch', 'origin', pr_ref],
+                  self.repo_dir,
+                  check_result=True)
+    utils.execute(['git', 'checkout', '-f', 'FETCH_HEAD'],
+                  self.repo_dir,
+                  check_result=True)
+
   def checkout_commit(self, commit):
     """Checks out a specific commit from the repo.
 
     Args:
-      commit: The commit SHA to be checked out
+      commit: The commit SHA to be checked out.
 
     Raises:
-      RepoManagerError when checkout is not successful
+      RuntimeError: when checkout is not successful.
+      ValueError: when commit does not exist.
     """
+    self.fetch_unshallow()
     if not self.commit_exists(commit):
-      raise RepoManagerError('Commit %s does not exist in current branch' %
-                             commit)
-
-    git_path = os.path.join(self.repo_dir, '.git', 'shallow')
-    if os.path.exists(git_path):
-      self._run_command(['git', 'fetch', '--unshallow'],
-                        self.repo_dir,
-                        check_result=True)
-    self._run_command(['git', 'checkout', '-f', commit],
-                      self.repo_dir,
-                      check_result=True)
-    self._run_command(['git', 'clean', '-fxd'],
-                      self.repo_dir,
-                      check_result=True)
+      raise ValueError('Commit %s does not exist in current branch' % commit)
+    utils.execute(['git', 'checkout', '-f', commit],
+                  self.repo_dir,
+                  check_result=True)
+    utils.execute(['git', 'clean', '-fxd'], self.repo_dir, check_result=True)
     if self.get_current_commit() != commit:
-      raise RepoManagerError('Error checking out commit %s' % commit)
+      raise RuntimeError('Error checking out commit %s' % commit)
 
   def remove_repo(self):
     """Attempts to remove the git repo. """
diff --git a/infra/repo_manager_test.py b/infra/repo_manager_test.py
index c8627f6..f489b2d 100644
--- a/infra/repo_manager_test.py
+++ b/infra/repo_manager_test.py
@@ -11,67 +11,126 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing perepo_managerissions and
 # limitations under the License.
-"""Test the functionality of the RepoManager class
-The will consist of the following functional tests
-  1. Cloning of directory in desired location
-  2. Checking out a specific commit
-  3. Can get a list of commits between two SHAs
-"""
+"""Test the functionality of the RepoManager class."""
 
 import os
 import unittest
+import tempfile
 
 import repo_manager
 
+OSS_FUZZ_REPO = 'https://github.com/google/oss-fuzz'
+
 
 class TestRepoManager(unittest.TestCase):
   """Class to test the functionality of the RepoManager class."""
 
-  curl_repo = 'https://github.com/curl/curl'
 
-  def test_clone_correctly(self):
+class RepoManagerCloneUnitTests(unittest.TestCase):
+  """Class to test the functionality of clone of the RepoManager class."""
+
+  def test_clone_valid_repo(self):
     """Tests the correct location of the git repo."""
-    test_repo_manager = repo_manager.RepoManager(self.curl_repo, 'tmp')
-    git_path = os.path.join(test_repo_manager.base_dir,
-                            test_repo_manager.repo_name, '.git')
-    self.assertTrue(os.path.isdir(git_path))
-    test_repo_manager.remove_repo()
-    with self.assertRaises(repo_manager.RepoManagerError):
-      test_repo_manager = repo_manager.RepoManager(' ', 'tmp')
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      git_path = os.path.join(test_repo_manager.base_dir,
+                              test_repo_manager.repo_name, '.git')
+      self.assertTrue(os.path.isdir(git_path))
+      test_repo_manager.remove_repo()
 
-  def test_checkout_commit(self):
+  def test_clone_invalid_repo(self):
+    """Test that constructing RepoManager with an invalid repo will fail."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      with self.assertRaises(ValueError):
+        repo_manager.RepoManager(' ', tmp_dir)
+      with self.assertRaises(ValueError):
+        repo_manager.RepoManager('not_a_valid_repo', tmp_dir)
+      with self.assertRaises(ValueError):
+        repo_manager.RepoManager('https://github.com/oss-fuzz-not-real.git',
+                                 tmp_dir)
+
+
+class RepoManagerCheckoutUnitTests(unittest.TestCase):
+  """Class to test the functionality of checkout of the RepoManager class."""
+
+  def test_checkout_valid_commit(self):
     """Tests that the git checkout command works."""
-    test_repo_manager = repo_manager.RepoManager(self.curl_repo, 'tmp')
-    commit_to_test = '036ebac0134de3b72052a46f734e4ca81bb96055'
-    test_repo_manager.checkout_commit(commit_to_test)
-    self.assertEqual(commit_to_test, test_repo_manager.get_current_commit())
-    with self.assertRaises(ValueError):
-      test_repo_manager.checkout_commit(' ')
-    with self.assertRaises(repo_manager.RepoManagerError):
-      test_repo_manager.checkout_commit(
-          'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
-    test_repo_manager.remove_repo()
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      commit_to_test = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
+      test_repo_manager.checkout_commit(commit_to_test)
+      self.assertEqual(commit_to_test, test_repo_manager.get_current_commit())
 
-  def test_get_commit_list(self):
-    """Tests an accurate commit list can be retrived from the repo manager."""
-    test_repo_manager = repo_manager.RepoManager(self.curl_repo, 'tmp')
-    old_commit = '7cf18b05e04bbb0f08c74d2567b0648f6c31a952'
-    new_commit = '113db127ee2b2f874dfcce406103ffe666e11953'
-    commit_list = [
-        '113db127ee2b2f874dfcce406103ffe666e11953',
-        '793e37767581aec7102d2ecafa34fc1316b1b31f',
-        '9a2cbf30b81a2b57149bb20e78e2e4cb5c2ff389',
-        '7cf18b05e04bbb0f08c74d2567b0648f6c31a952'
-    ]
-    result_list = test_repo_manager.get_commit_list(old_commit, new_commit)
-    self.assertListEqual(commit_list, result_list)
-    with self.assertRaises(repo_manager.RepoManagerError):
-      test_repo_manager.get_commit_list('asafd', new_commit)
-    with self.assertRaises(repo_manager.RepoManagerError):
-      test_repo_manager.get_commit_list(new_commit, 'asdfasdf')
-    with self.assertRaises(repo_manager.RepoManagerError):
-      # Testing commits out of order
-      test_repo_manager.get_commit_list(new_commit, old_commit)
+  def test_checkout_invalid_commit(self):
+    """Tests that the git checkout invalid commit fails."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      with self.assertRaises(ValueError):
+        test_repo_manager.checkout_commit(' ')
+      with self.assertRaises(ValueError):
+        test_repo_manager.checkout_commit(
+            'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
+      with self.assertRaises(ValueError):
+        test_repo_manager.checkout_commit('not-a-valid-commit')
+
+
+class RepoManagerGetCommitListUnitTests(unittest.TestCase):
+  """Class to test the functionality of get commit list in the
+   RepoManager class."""
+
+  def test_get_valid_commit_list(self):
+    """Tests an accurate commit list can be retrieved from the repo manager."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      old_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
+      new_commit = 'fa662173bfeb3ba08d2e84cefc363be11e6c8463'
+      commit_list = [
+          'fa662173bfeb3ba08d2e84cefc363be11e6c8463',
+          '17035317a44fa89d22fe6846d868d4bf57def78b',
+          '97dee00a3c4ce95071c3e061592f5fd577dea886',
+          '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
+      ]
+      result_list = test_repo_manager.get_commit_list(old_commit, new_commit)
+      self.assertListEqual(commit_list, result_list)
+
+  def test_invalid_commit_list(self):
+    """Tests that the propper Errors are thrown when invalid commits are
+    passed."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      old_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
+      new_commit = 'fa662173bfeb3ba08d2e84cefc363be11e6c8463'
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      with self.assertRaises(ValueError):
+        test_repo_manager.get_commit_list('fakecommit', new_commit)
+      with self.assertRaises(ValueError):
+        test_repo_manager.get_commit_list(new_commit, 'fakecommit')
+      with self.assertRaises(RuntimeError):
+        # pylint: disable=arguments-out-of-order
+        test_repo_manager.get_commit_list(new_commit, old_commit)
+
+
+class RepoManagerCheckoutPullRequestUnitTests(unittest.TestCase):
+  """Class to test the functionality of checkout_pr of the RepoManager class."""
+
+  def test_checkout_valid_pull_request(self):
+    """Tests that the git checkout pull request works."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      test_repo_manager.checkout_pr('refs/pull/3310/merge')
+      self.assertEqual(test_repo_manager.get_current_commit(),
+                       'ff00c1685ccf32f729cf6c834e641223ce6262e4')
+
+  def test_checkout_invalid_pull_request(self):
+    """Tests that the git checkout invalid pull request fails."""
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      test_repo_manager = repo_manager.RepoManager(OSS_FUZZ_REPO, tmp_dir)
+      with self.assertRaises(RuntimeError):
+        test_repo_manager.checkout_pr(' ')
+      with self.assertRaises(RuntimeError):
+        test_repo_manager.checkout_pr(
+            'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
+      with self.assertRaises(RuntimeError):
+        test_repo_manager.checkout_pr('not/a/valid/pr')
 
 
 if __name__ == '__main__':
diff --git a/infra/test_repos.py b/infra/test_repos.py
new file mode 100644
index 0000000..6100369
--- /dev/null
+++ b/infra/test_repos.py
@@ -0,0 +1,77 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""This module contains a list of test repositories used in unit/integration
+tests.
+
+Note: If you notice tests failing for unexpected reasons, make sure the data
+in the test repos are correct. This is because the test repos are dynamic and
+may change.
+
+Note: This should be removed when a better method of testing is established.
+"""
+
+import collections
+import os
+
+ExampleRepo = collections.namedtuple('ExampleRepo', [
+    'project_name', 'oss_repo_name', 'git_repo_name', 'git_url', 'new_commit',
+    'old_commit', 'intro_commit', 'fuzz_target', 'test_case_path'
+])
+
+TEST_DIR_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                             'testcases')
+
+# WARNING: These tests are dependent upon the following repos existing and
+# the specified commits existing.
+TEST_REPOS = [
+    ExampleRepo(project_name='usrsctp',
+                oss_repo_name='usrsctp',
+                git_repo_name='usrsctp',
+                git_url='https://github.com/weinrank/usrsctp',
+                old_commit='4886aaa49fb90e479226fcfc3241d74208908232',
+                new_commit='c710749b1053978179a027973a3ea3bccf20ee5c',
+                intro_commit='457d6ead58e82584d9dcb826f6739347f59ebd3a',
+                fuzz_target='fuzzer_connect',
+                test_case_path=os.path.join(TEST_DIR_PATH,
+                                            'usrsctp_test_data')),
+    ExampleRepo(project_name='curl',
+                oss_repo_name='curl',
+                git_repo_name='curl',
+                git_url='https://github.com/curl/curl.git',
+                old_commit='df26f5f9c36e19cd503c0e462e9f72ad37b84c82',
+                new_commit='dda418266c99ceab368d723facb52069cbb9c8d5',
+                intro_commit='df26f5f9c36e19cd503c0e462e9f72ad37b84c82',
+                fuzz_target='curl_fuzzer_ftp',
+                test_case_path=os.path.join(TEST_DIR_PATH, 'curl_test_data')),
+    ExampleRepo(project_name='libarchive',
+                oss_repo_name='libarchive',
+                git_repo_name='libarchive',
+                git_url='https://github.com/libarchive/libarchive.git',
+                old_commit='5bd2a9b6658a3a6efa20bb9ad75bd39a44d71da6',
+                new_commit='458e49358f17ec58d65ab1c45cf299baaf3c98d1',
+                intro_commit='840266712006de5e737f8052db920dfea2be4260',
+                fuzz_target='libarchive_fuzzer',
+                test_case_path=os.path.join(TEST_DIR_PATH,
+                                            'libarchive_test_data'))
+]
+
+INVALID_REPO = ExampleRepo(project_name='notaproj',
+                           oss_repo_name='notarepo',
+                           git_repo_name='notarepo',
+                           git_url='invalid.git',
+                           old_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
+                           new_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
+                           intro_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
+                           fuzz_target='NONEFUZZER',
+                           test_case_path='not/a/path')
diff --git a/infra/testcases/curl_test_data b/infra/testcases/curl_test_data
new file mode 100644
index 0000000..ed4b54e
--- /dev/null
+++ b/infra/testcases/curl_test_data
Binary files differ
diff --git a/infra/testcases/libarchive_test_data b/infra/testcases/libarchive_test_data
new file mode 100644
index 0000000..928bfec
--- /dev/null
+++ b/infra/testcases/libarchive_test_data
Binary files differ
diff --git a/infra/testcases/ndpi_test_data b/infra/testcases/ndpi_test_data
new file mode 100644
index 0000000..010af86
--- /dev/null
+++ b/infra/testcases/ndpi_test_data
Binary files differ
diff --git a/infra/testcases/usrsctp_test_data b/infra/testcases/usrsctp_test_data
new file mode 100644
index 0000000..fa90322
--- /dev/null
+++ b/infra/testcases/usrsctp_test_data
Binary files differ
diff --git a/infra/yara_test_data b/infra/testcases/yara_test_data
similarity index 100%
rename from infra/yara_test_data
rename to infra/testcases/yara_test_data
diff --git a/infra/utils.py b/infra/utils.py
new file mode 100644
index 0000000..8f0e4e2
--- /dev/null
+++ b/infra/utils.py
@@ -0,0 +1,122 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for OSS-Fuzz infrastructure."""
+
+import os
+import re
+import stat
+import subprocess
+
+import helper
+
+ALLOWED_FUZZ_TARGET_EXTENSIONS = ['', '.exe']
+FUZZ_TARGET_SEARCH_STRING = 'LLVMFuzzerTestOneInput'
+VALID_TARGET_NAME = re.compile(r'^[a-zA-Z0-9_-]+$')
+
+
+def chdir_to_root():
+  """Changes cwd to OSS-Fuzz root directory."""
+  # Change to oss-fuzz main directory so helper.py runs correctly.
+  if os.getcwd() != helper.OSSFUZZ_DIR:
+    os.chdir(helper.OSSFUZZ_DIR)
+
+
+def execute(command, location=None, check_result=False):
+  """ Runs a shell command in the specified directory location.
+
+  Args:
+    command: The command as a list to be run.
+    location: The directory the command is run in.
+    check_result: Should an exception be thrown on failed command.
+
+  Returns:
+    The stdout of the command, the error code.
+
+  Raises:
+    RuntimeError: running a command resulted in an error.
+  """
+
+  if not location:
+    location = os.getcwd()
+  process = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=location)
+  out, err = process.communicate()
+  if check_result and (process.returncode or err):
+    raise RuntimeError('Error: %s\n Command: %s\n Return code: %s\n Out: %s' %
+                       (err, command, process.returncode, out))
+  if out is not None:
+    out = out.decode('ascii').rstrip()
+  return out, process.returncode
+
+
+def get_fuzz_targets(path):
+  """Get list of fuzz targets in a directory.
+
+  Args:
+    path: A path to search for fuzz targets in.
+
+  Returns:
+    A list of paths to fuzzers or an empty list if None.
+  """
+  if not os.path.exists(path):
+    return []
+  fuzz_target_paths = []
+  for root, _, _ in os.walk(path):
+    for filename in os.listdir(path):
+      file_path = os.path.join(root, filename)
+      if is_fuzz_target_local(file_path):
+        fuzz_target_paths.append(file_path)
+
+  return fuzz_target_paths
+
+
+def get_container_name():
+  """Gets the name of the current docker container you are in.
+  /proc/self/cgroup can be used to check control groups e.g. Docker.
+  See: https://docs.docker.com/config/containers/runmetrics/ for more info.
+
+  Returns:
+    Container name or None if not in a container.
+  """
+  with open('/proc/self/cgroup') as file_handle:
+    if 'docker' not in file_handle.read():
+      return None
+  with open('/etc/hostname') as file_handle:
+    return file_handle.read().strip()
+
+
+def is_fuzz_target_local(file_path):
+  """Returns whether |file_path| is a fuzz target binary (local path).
+  Copied from clusterfuzz src/python/bot/fuzzers/utils.py
+  with slight modifications.
+  """
+  filename, file_extension = os.path.splitext(os.path.basename(file_path))
+  if not VALID_TARGET_NAME.match(filename):
+    # Check fuzz target has a valid name (without any special chars).
+    return False
+
+  if file_extension not in ALLOWED_FUZZ_TARGET_EXTENSIONS:
+    # Ignore files with disallowed extensions (to prevent opening e.g. .zips).
+    return False
+
+  if not os.path.exists(file_path) or not os.access(file_path, os.X_OK):
+    return False
+
+  if filename.endswith('_fuzzer'):
+    return True
+
+  if os.path.exists(file_path) and not stat.S_ISREG(os.stat(file_path).st_mode):
+    return False
+
+  with open(file_path, 'rb') as file_handle:
+    return file_handle.read().find(FUZZ_TARGET_SEARCH_STRING.encode()) != -1
diff --git a/infra/utils_test.py b/infra/utils_test.py
new file mode 100644
index 0000000..ab3d216
--- /dev/null
+++ b/infra/utils_test.py
@@ -0,0 +1,102 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test the functionality of the utils module's functions:
+1. is_fuzz_target_local
+2. get_fuzz_targets
+3. get_env_var
+"""
+
+import os
+import unittest
+
+import utils
+import helper
+
+EXAMPLE_PROJECT = 'example'
+
+
+class IsFuzzTargetLocalUnitTest(unittest.TestCase):
+  """Test is_fuzz_target_local function in the utils module."""
+
+  def test_invalid_filepath(self):
+    """Test the function with an invalid file path."""
+    is_local = utils.is_fuzz_target_local('not/a/real/file')
+    self.assertFalse(is_local)
+    is_local = utils.is_fuzz_target_local('')
+    self.assertFalse(is_local)
+    is_local = utils.is_fuzz_target_local(' ')
+    self.assertFalse(is_local)
+
+  def test_valid_filepath(self):
+    """Checks is_fuzz_target_local function with a valid filepath."""
+    utils.chdir_to_root()
+    helper.build_fuzzers_impl(EXAMPLE_PROJECT,
+                              True,
+                              'libfuzzer',
+                              'address',
+                              'x86_64', [],
+                              None,
+                              no_cache=False,
+                              mount_location=None)
+    is_local = utils.is_fuzz_target_local(
+        os.path.join(helper.OSSFUZZ_DIR, 'build', 'out', EXAMPLE_PROJECT,
+                     'do_stuff_fuzzer'))
+    self.assertTrue(is_local)
+    is_local = utils.is_fuzz_target_local(
+        os.path.join(helper.OSSFUZZ_DIR, 'build', 'out', EXAMPLE_PROJECT,
+                     'do_stuff_fuzzer.dict'))
+    self.assertFalse(is_local)
+
+
+class GetFuzzTargetsUnitTest(unittest.TestCase):
+  """Test get_fuzz_targets function in the utils module."""
+
+  def test_valid_filepath(self):
+    """Tests that fuzz targets can be retrieved once the fuzzers are built."""
+    utils.chdir_to_root()
+    helper.build_fuzzers_impl(EXAMPLE_PROJECT,
+                              True,
+                              'libfuzzer',
+                              'address',
+                              'x86_64', [],
+                              None,
+                              no_cache=False,
+                              mount_location=None)
+    fuzz_targets = utils.get_fuzz_targets(
+        os.path.join(helper.OSSFUZZ_DIR, 'build', 'out', EXAMPLE_PROJECT))
+    self.assertCountEqual(fuzz_targets, [
+        os.path.join(helper.OSSFUZZ_DIR, 'build', 'out', EXAMPLE_PROJECT,
+                     'do_stuff_fuzzer')
+    ])
+    fuzz_targets = utils.get_fuzz_targets(
+        os.path.join(helper.OSSFUZZ_DIR, 'infra'))
+    self.assertFalse(fuzz_targets)
+
+  def test_invalid_filepath(self):
+    """Tests what get_fuzz_targets return when invalid filepath is used."""
+    utils.chdir_to_root()
+    helper.build_fuzzers_impl(EXAMPLE_PROJECT,
+                              True,
+                              'libfuzzer',
+                              'address',
+                              'x86_64', [],
+                              None,
+                              no_cache=False,
+                              mount_location=None)
+    fuzz_targets = utils.get_fuzz_targets('not/a/valid/file/path')
+    self.assertFalse(fuzz_targets)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/projects/proj4/build.sh b/projects/arrow/Dockerfile
old mode 100755
new mode 100644
similarity index 61%
rename from projects/proj4/build.sh
rename to projects/arrow/Dockerfile
index 37c65f7..110056d
--- a/projects/proj4/build.sh
+++ b/projects/arrow/Dockerfile
@@ -1,5 +1,4 @@
-#!/bin/bash -eu
-# Copyright 2016 Google Inc.
+# Copyright 2020 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,10 +14,16 @@
 #
 ################################################################################
 
-./autogen.sh
-./configure --disable-shared
-make clean -s
-make -j$(nproc) -s
+FROM gcr.io/oss-fuzz-base/base-builder
+MAINTAINER dev@arrow.apache.org
 
-./test/fuzzers/build_google_oss_fuzzers.sh
-./test/fuzzers/build_seed_corpus.sh
+ENV DEBIAN_FRONTEND noninteractive
+RUN apt-get update -y -q && \
+    apt-get update -y -q && \
+    apt-get install -y -q --no-install-recommends \
+        cmake \
+        ninja-build \
+        python3
+
+RUN git clone --depth=1 https://github.com/apache/arrow.git $SRC/arrow
+COPY build.sh $SRC/
diff --git a/projects/arrow/build.sh b/projects/arrow/build.sh
new file mode 100755
index 0000000..dad1c0d
--- /dev/null
+++ b/projects/arrow/build.sh
@@ -0,0 +1,61 @@
+#!/bin/bash -eu
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+set -ex
+
+ARROW=${SRC}/arrow/cpp
+
+cd ${WORK}
+
+cmake ${ARROW} -GNinja \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DARROW_DEPENDENCY_SOURCE=BUNDLED \
+    -DCMAKE_C_FLAGS="${CFLAGS}" \
+    -DCMAKE_CXX_FLAGS="${CXXFLAGS}" \
+    -DARROW_EXTRA_ERROR_CONTEXT=off \
+    -DARROW_JEMALLOC=off \
+    -DARROW_MIMALLOC=off \
+    -DARROW_FILESYSTEM=off \
+    -DARROW_PARQUET=off \
+    -DARROW_BUILD_SHARED=off \
+    -DARROW_BUILD_STATIC=on \
+    -DARROW_BUILD_TESTS=off \
+    -DARROW_BUILD_INTEGRATION=off \
+    -DARROW_BUILD_BENCHMARKS=off \
+    -DARROW_BUILD_EXAMPLES=off \
+    -DARROW_BUILD_UTILITIES=off \
+    -DARROW_TEST_LINKAGE=static \
+    -DPARQUET_BUILD_EXAMPLES=off \
+    -DPARQUET_BUILD_EXECUTABLES=off \
+    -DPARQUET_REQUIRE_ENCRYPTION=off \
+    -DARROW_WITH_BROTLI=off \
+    -DARROW_WITH_BZ2=off \
+    -DARROW_WITH_LZ4=off \
+    -DARROW_WITH_SNAPPY=off \
+    -DARROW_WITH_ZLIB=off \
+    -DARROW_WITH_ZSTD=off \
+    -DARROW_USE_GLOG=off \
+    -DARROW_USE_ASAN=off \
+    -DARROW_USE_UBSAN=off \
+    -DARROW_USE_TSAN=off \
+    -DARROW_FUZZING=on \
+
+cmake --build .
+
+cp -a release/* ${OUT}
+
+${ARROW}/build-support/fuzzing/generate_corpuses.sh ${OUT}
diff --git a/projects/arrow/project.yaml b/projects/arrow/project.yaml
new file mode 100644
index 0000000..27a1456
--- /dev/null
+++ b/projects/arrow/project.yaml
@@ -0,0 +1,10 @@
+homepage: "https://arrow.apache.org/"
+primary_contact: "antoine@python.org"
+auto_ccs:
+  - "bengilgit@gmail.com"
+  - "emkornfield@gmail.com"
+  - "fsaintjacques@gmail.com"
+  - "micahk@google.com"
+  - "neal@rstudio.com"
+  - "szucs.krisztian@gmail.com"
+  - "wesmckinn@gmail.com"
diff --git "a/projects/assimp/\043project.yaml\043" "b/projects/assimp/\043project.yaml\043"
new file mode 100644
index 0000000..612e75b
--- /dev/null
+++ "b/projects/assimp/\043project.yaml\043"
@@ -0,0 +1,10 @@
+homepage: "https://github.com/assimp/assimp"
+primary_contact: "kim.kulling@googlemail.com"
+auto_ccs:
+  - "kientzle@gmail.com"
+  - "martin@matuska.org"
+sanitizers:
+  - address
+  - memory:
+     experimental: True
+  - undefined
diff --git a/projects/binutils/build.sh b/projects/binutils/build.sh
index c5476fd..0c6fcc4 100755
--- a/projects/binutils/build.sh
+++ b/projects/binutils/build.sh
@@ -16,6 +16,10 @@
 ################################################################################
 
 # build project
+if [ "$SANITIZER" = undefined ]; then
+    export CFLAGS="$CFLAGS -fno-sanitize=unsigned-integer-overflow"
+    export CXXFLAGS="$CXXFLAGS -fno-sanitize=unsigned-integer-overflow"
+fi
 cd binutils-gdb
 ./configure --disable-gdb --enable-targets=all
 make MAKEINFO=true && true
diff --git a/projects/capstone/project.yaml b/projects/capstone/project.yaml
index 2c07cbb..ea91d82 100644
--- a/projects/capstone/project.yaml
+++ b/projects/capstone/project.yaml
@@ -1,8 +1,14 @@
 homepage: "https://www.capstone-engine.org"
 primary_contact: "capstone.engine@gmail.com"
-auto_ccs : "p.antoine@catenacyber.fr"
-
+auto_ccs : 
+  - "p.antoine@catenacyber.fr"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
-- address
-- memory
-- undefined
+  - address
+  - memory
+  - undefined
+  - dataflow
diff --git a/projects/clamav/build.sh b/projects/clamav/build.sh
index c383ab6..716bc8a 100755
--- a/projects/clamav/build.sh
+++ b/projects/clamav/build.sh
@@ -21,7 +21,7 @@
 rm -rf ${WORK}/build
 mkdir -p ${WORK}/build
 cd ${WORK}/build
-${SRC}/clamav-devel/configure --enable-fuzz=yes --with-libjson=no --with-pcre=no --enable-static=yes --enable-shared=no --disable-llvm --host=x86_64-unknown-linux-gnu
+ac_cv_c_mmap_anonymous=no ${SRC}/clamav-devel/configure --disable-mempool --enable-fuzz=yes --with-libjson=no --with-pcre=no --enable-static=yes --enable-shared=no --disable-llvm --host=x86_64-unknown-linux-gnu
 make clean
 make -j"$(nproc)"
 
diff --git a/projects/cmark/project.yaml b/projects/cmark/project.yaml
index 2f23d1e..06a0be9 100644
--- a/projects/cmark/project.yaml
+++ b/projects/cmark/project.yaml
@@ -2,10 +2,17 @@
 primary_contact: "jgm@berkeley.edu"
 auto_ccs:
   - "kivikakk@github.com"
+  - "wellnhofer@aevum.de"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
   - address
   - memory
   - undefined
+  - dataflow
 architectures:
   - x86_64
   - i386
diff --git a/projects/cryptofuzz/project.yaml b/projects/cryptofuzz/project.yaml
index f6e0361..9ac4f6a 100644
--- a/projects/cryptofuzz/project.yaml
+++ b/projects/cryptofuzz/project.yaml
@@ -25,6 +25,7 @@
     - "jjones@mozilla.com"
     - "sledru@mozilla.com"
     - "kjacobs@mozilla.com"
+    - "matthias.st.pierre@gmail.com"
 sanitizers:
  - address
  - undefined
diff --git a/projects/django/build.sh b/projects/django/build.sh
index 4c51804..25d7594 100755
--- a/projects/django/build.sh
+++ b/projects/django/build.sh
@@ -41,7 +41,7 @@
     ;;
 esac
 
-export CPYTHON_INSTALL_PATH=$OUT/cpython-install
+export CPYTHON_INSTALL_PATH=$SRC/cpython-install
 rm -rf $CPYTHON_INSTALL_PATH
 mkdir $CPYTHON_INSTALL_PATH
 
@@ -57,6 +57,8 @@
 make -j$(nproc)
 make install
 
+cp -R $CPYTHON_INSTALL_PATH $OUT/
+
 rm -rf $OUT/django-dependencies
 mkdir $OUT/django-dependencies
 $CPYTHON_INSTALL_PATH/bin/pip3 install asgiref pytz sqlparse -t $OUT/django-dependencies
diff --git a/projects/ecc-diff-fuzzer/build.sh b/projects/ecc-diff-fuzzer/build.sh
index 71e05ba..248941c 100755
--- a/projects/ecc-diff-fuzzer/build.sh
+++ b/projects/ecc-diff-fuzzer/build.sh
@@ -106,6 +106,7 @@
 cp fuzz_ec.dict $OUT/
 
 $CC $CFLAGS -I. -c fuzz_ec.c -o fuzz_ec.o
+$CC $CFLAGS -I. -c fail.c -o fail.o
 $CC $CFLAGS -I. -I../mbedtls/include -I../mbedtls/crypto/include -c modules/mbedtls.c -o mbedtls.o
 $CC $CFLAGS -I. -I../openssl/include -c modules/openssl.c -o openssl.o
 $CC $CFLAGS -DWITH_STDLIB -I. -I../libecc/src -c modules/libecc.c -o libecc.o
@@ -114,4 +115,4 @@
 $CC $CFLAGS -I. -I../ -c modules/nettle.c -o nettle.o
 $CXX $CXXFLAGS -std=c++11 -I. -I../ -I../botan/build/include -c modules/botan.cpp -o botan.o
 
-$CXX $CXXFLAGS fuzz_ec.o mbedtls.o libecc.o openssl.o gcrypt.o cryptopp.o nettle.o botan.o -o $OUT/fuzz_ec ../mbedtls/crypto/library/libmbedcrypto.a ../libecc/build/libec.a ../libecc/src/external_deps/rand.o ../openssl/libcrypto.a ../nettle/libhogweed.a ../nettle/libnettle.a ../nettle/gmp-6.1.2/.libs/libgmp.a ../gcrypt/src/.libs/libgcrypt.a ../cryptopp/libcryptopp.a ../botan/libbotan-2.a -lgpg-error $LIB_FUZZING_ENGINE
+$CXX $CXXFLAGS fuzz_ec.o fail.o mbedtls.o libecc.o openssl.o gcrypt.o cryptopp.o nettle.o botan.o -o $OUT/fuzz_ec ../mbedtls/crypto/library/libmbedcrypto.a ../libecc/build/libec.a ../libecc/src/external_deps/rand.o ../openssl/libcrypto.a ../nettle/libhogweed.a ../nettle/libnettle.a ../nettle/gmp-6.1.2/.libs/libgmp.a ../gcrypt/src/.libs/libgcrypt.a ../cryptopp/libcryptopp.a ../botan/libbotan-2.a -lgpg-error $LIB_FUZZING_ENGINE
diff --git a/projects/envoy/build.sh b/projects/envoy/build.sh
index 934211d..39ef1b2 100755
--- a/projects/envoy/build.sh
+++ b/projects/envoy/build.sh
@@ -119,6 +119,8 @@
   mkdir -p "${CORPUS_UNTAR_PATH}"
   tar -C "${CORPUS_UNTAR_PATH}" -xvf bazel-bin/"${t}"_corpus_tar.tar
   TARGET_BASE="$(expr "$t" : '.*/\(.*\)_fuzz_test')"
+  # There may be *.dict files in this folder that need to be moved into the OUT dir.
+  find "${CORPUS_UNTAR_PATH}" -type f -name *.dict -exec mv -n {} "${OUT}"/ \;
   zip "${OUT}/${TARGET_BASE}"_fuzz_test_seed_corpus.zip \
     "${CORPUS_UNTAR_PATH}"/*
 done
diff --git a/projects/freetype2/project.yaml b/projects/freetype2/project.yaml
index 3756cd3..e55e3d9 100644
--- a/projects/freetype2/project.yaml
+++ b/projects/freetype2/project.yaml
@@ -7,6 +7,7 @@
   - "ewaldhew@gmail.com"
   - "apodtele@gmail.com"
   - "prince.cherusker@gmail.com"
+  - "drott@chromium.org"
 vendor_ccs:
   - "jkew@mozilla.com"
   - "jmuizelaar@mozilla.com"
diff --git a/projects/gdal/Dockerfile b/projects/gdal/Dockerfile
index f04859e..649fe3f 100644
--- a/projects/gdal/Dockerfile
+++ b/projects/gdal/Dockerfile
@@ -28,7 +28,7 @@
 
 COPY NC4_put_propattr_leak_fix.patch libnetcdf_fix_undefined_left_shift_in_ncx_get_size_t.patch $SRC/
 
-RUN curl ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.4.1.1.tar.gz > gdal/netcdf-4.4.1.1.tar.gz && \
+RUN curl https://www.gfd-dennou.org/arch/netcdf/unidata-mirror/netcdf-4.4.1.1.tar.gz > gdal/netcdf-4.4.1.1.tar.gz && \
     cd gdal && \
     tar xzf netcdf-4.4.1.1.tar.gz && \
     rm -f netcdf-4.4.1.1.tar.gz && \
diff --git a/projects/ghostscript/Dockerfile b/projects/ghostscript/Dockerfile
index 21dadb5..c4cdbd9 100644
--- a/projects/ghostscript/Dockerfile
+++ b/projects/ghostscript/Dockerfile
@@ -17,8 +17,9 @@
 FROM gcr.io/oss-fuzz-base/base-builder
 MAINTAINER skau@google.com
 
-RUN apt-get update && apt-get install -y autoconf zlibc liblcms2-dev libfreetype6-dev libpng-dev libtiff-dev
+RUN apt-get update && apt-get install -y autoconf zlibc libtool liblcms2-dev libpng-dev libtiff-dev
 RUN git clone --branch branch-2.2 --single-branch --depth 1 https://github.com/apple/cups.git cups
+RUN git clone --branch VER-2-10-1 --single-branch --depth 1 https://git.savannah.nongnu.org/git/freetype/freetype2.git freetype
 RUN git clone --single-branch --depth 1 git://git.ghostscript.com/ghostpdl.git ghostpdl
 
 RUN mkdir ghostpdl/fuzz
diff --git a/projects/ghostscript/build.sh b/projects/ghostscript/build.sh
index b6f9a82..11ca104 100755
--- a/projects/ghostscript/build.sh
+++ b/projects/ghostscript/build.sh
@@ -36,14 +36,16 @@
 rm -rf tiff || die
 rm -rf zlib || die
 
-export CUPSCONFIG="$WORK/cups-config"
+mv ../freetype freetype
+
+CUPSCONFIG="$WORK/cups-config"
 CUPS_CFLAGS=$($CUPSCONFIG --cflags)
 CUPS_LDFLAGS=$($CUPSCONFIG --ldflags)
 CUPS_LIBS=$($CUPSCONFIG --image --libs)
 export CXXFLAGS="$CXXFLAGS $CUPS_CFLAGS"
 
-./autogen.sh
-CPPFLAGS="${CPPFLAGS:-} $CUPS_CFLAGS" ./configure \
+CPPFLAGS="${CPPFLAGS:-} $CUPS_CFLAGS" ./autogen.sh \
+  CUPSCONFIG=$CUPSCONFIG \
   --enable-freetype --enable-fontconfig \
   --enable-cups --with-ijs --with-jbig2dec \
   --with-drivers=cups,ljet4,laserjet,pxlmono,pxlcolor,pcl3,uniprint
diff --git a/projects/gnutls/build.sh b/projects/gnutls/build.sh
index 83a23b5..a442827 100755
--- a/projects/gnutls/build.sh
+++ b/projects/gnutls/build.sh
@@ -77,7 +77,8 @@
 ./bootstrap
 ASAN_OPTIONS=detect_leaks=0 LIBS="-lunistring" CXXFLAGS="$CXXFLAGS -L$DEPS_PATH/lib" \
   ./configure --enable-fuzzer-target --disable-gcc-warnings --enable-static --disable-shared --disable-doc --disable-tests \
-    --disable-tools --disable-cxx --disable-maintainer-mode --disable-libdane --without-p11-kit $GNUTLS_CONFIGURE_FLAGS
+    --disable-tools --disable-cxx --disable-maintainer-mode --disable-libdane --without-p11-kit \
+    --disable-full-test-suite $GNUTLS_CONFIGURE_FLAGS
 
 # Do not use the syscall interface for randomness in oss-fuzz, it seems
 # to confuse memory sanitizer.
diff --git a/projects/gnutls/project.yaml b/projects/gnutls/project.yaml
index 34d15bb..87928f7 100644
--- a/projects/gnutls/project.yaml
+++ b/projects/gnutls/project.yaml
@@ -1,11 +1,11 @@
 homepage: "https://www.gnutls.org"
 primary_contact: "n.mavrogiannopoulos@gmail.com"
 auto_ccs:
-  - "alex.gaynor@gmail.com"
   - "daiki.ueno@gmail.com"
   - "rockdaboot@gmail.com"
   - "nisse@google.com"
   - "anderjuaristi.cictg@gmail.com"
+  - "dbaryshkov@gmail.com"
 
 sanitizers:
   - address
diff --git a/projects/go-attestation/project.yaml b/projects/go-attestation/project.yaml
index ac3f56b..3dd47d8 100644
--- a/projects/go-attestation/project.yaml
+++ b/projects/go-attestation/project.yaml
@@ -7,3 +7,4 @@
   - libfuzzer
 sanitizers:
   - address
+language: go
diff --git a/projects/go-json-iterator/project.yaml b/projects/go-json-iterator/project.yaml
index 2fc93ba..101f0d4 100644
--- a/projects/go-json-iterator/project.yaml
+++ b/projects/go-json-iterator/project.yaml
@@ -1,7 +1,7 @@
 homepage: "https://jsoniter.com"
 primary_contact: "taowen@gmail.com"
 auto_ccs : "p.antoine@catenacyber.fr"
-
+language: go
 fuzzing_engines:
 - libfuzzer
 sanitizers:
diff --git a/projects/golang-protobuf/project.yaml b/projects/golang-protobuf/project.yaml
index 7c49f77..71ee2df 100644
--- a/projects/golang-protobuf/project.yaml
+++ b/projects/golang-protobuf/project.yaml
@@ -6,3 +6,4 @@
  - address
 fuzzing_engines:
  - libfuzzer
+language: go
diff --git a/projects/golang/project.yaml b/projects/golang/project.yaml
index 2fe5b28..939f457 100644
--- a/projects/golang/project.yaml
+++ b/projects/golang/project.yaml
@@ -4,6 +4,7 @@
  - "golang-fuzz@googlegroups.com"
  - "mmoroz@chromium.org"
  - "josharian@gmail.com"
+language: go
 sanitizers:
  - address
 fuzzing_engines:
diff --git a/projects/gonids/project.yaml b/projects/gonids/project.yaml
index 19e4401..2b36088 100644
--- a/projects/gonids/project.yaml
+++ b/projects/gonids/project.yaml
@@ -1,7 +1,7 @@
 homepage: "https://github.com/google/gonids"
 primary_contact: "duane.security@gmail.com"
 auto_ccs : "p.antoine@catenacyber.fr"
-
+language: go
 fuzzing_engines:
 - libfuzzer
 sanitizers:
diff --git a/projects/grpc/build.sh b/projects/grpc/build.sh
index 0942b07..8583153 100755
--- a/projects/grpc/build.sh
+++ b/projects/grpc/build.sh
@@ -19,7 +19,6 @@
 set -o nounset
 
 readonly FUZZER_DICTIONARIES=(
-  test/core/end2end/fuzzers/api_fuzzer.dictionary
   test/core/end2end/fuzzers/hpack.dictionary
 )
 
@@ -33,7 +32,6 @@
   test/core/slice:percent_decode_fuzzer
   test/core/slice:percent_encode_fuzzer
   test/core/transport/chttp2:hpack_parser_fuzzer
-  test/core/end2end/fuzzers:api_fuzzer
   test/core/end2end/fuzzers:client_fuzzer
   test/core/end2end/fuzzers:server_fuzzer
   test/core/security:ssl_server_fuzzer
@@ -139,7 +137,6 @@
 zip "${OUT}/percent_decode_fuzzer_seed_corpus.zip" test/core/slice/percent_decode_corpus/*
 zip "${OUT}/percent_encode_fuzzer_seed_corpus.zip" test/core/slice/percent_encode_corpus/*
 zip "${OUT}/hpack_parser_fuzzer_seed_corpus.zip" test/core/transport/chttp2/hpack_parser_corpus/*
-zip "${OUT}/api_fuzzer_seed_corpus.zip" test/core/end2end/fuzzers/api_fuzzer_corpus/*
 zip "${OUT}/client_fuzzer_seed_corpus.zip" test/core/end2end/fuzzers/client_fuzzer_corpus/*
 zip "${OUT}/server_fuzzer_seed_corpus.zip" test/core/end2end/fuzzers/server_fuzzer_corpus/*
 zip "${OUT}/ssl_server_fuzzer_seed_corpus.zip" test/core/security/corpus/ssl_server_corpus/*
diff --git a/projects/grpc/project.yaml b/projects/grpc/project.yaml
index 34468a4..3807e7c 100644
--- a/projects/grpc/project.yaml
+++ b/projects/grpc/project.yaml
@@ -1,15 +1,12 @@
 homepage: "http://www.grpc.io/"
-primary_contact: "yangg@google.com"
+primary_contact: "nnoble@google.com"
 auto_ccs:
-  - "guantaol@google.com"
-  - "hcaseyal@google.com"
-  - "juanlishen@google.com"
-  - "mhaidry@google.com"
+  - "donnadionne@google.com"
+  - "veblush@google.com"
   - "roth@google.com"
-  - "nnoble@google.com"
-  - "sheenaqotj@google.com"
-  - "vpai@google.com"
+  - "karthikrs@google.com"
   - "yashkt@google.com"
+  - "jiangtao@google.com"
 fuzzing_engines:
   - libfuzzer
 coverage_extra_args: -ignore-filename-regex=.*\.cache.*
diff --git a/projects/harfbuzz/build.sh b/projects/harfbuzz/build.sh
index 74c4f7d..c3d0520 100755
--- a/projects/harfbuzz/build.sh
+++ b/projects/harfbuzz/build.sh
@@ -43,6 +43,7 @@
 	test/shaping/data/text-rendering-tests/fonts \
 	test/api/fonts \
 	test/fuzzing/fonts \
+	perf/fonts \
 	; do
 	cp $d/* all-fonts/
 done
diff --git a/projects/harfbuzz/project.yaml b/projects/harfbuzz/project.yaml
index 8b3dd2c..005daa5 100644
--- a/projects/harfbuzz/project.yaml
+++ b/projects/harfbuzz/project.yaml
@@ -12,14 +12,21 @@
   - "cchapman@adobe.com"
   - "ariza@typekit.com"
   - "qxliu@google.com"
+  - "ckitagawa@google.com"
 vendor_ccs:
   - "jmuizelaar@mozilla.com"
   - "lsalzman@mozilla.com"
   - "twsmith@mozilla.com"
+fuzzing_engines:
+ - libfuzzer
+ - afl
+ - honggfuzz
+ - dataflow
 sanitizers:
  - address
  - undefined
  - memory
+ - dataflow
 architectures:
   - x86_64
   - i386
diff --git a/projects/json-c/project.yaml b/projects/json-c/project.yaml
index af56702..435a803 100644
--- a/projects/json-c/project.yaml
+++ b/projects/json-c/project.yaml
@@ -2,6 +2,15 @@
 primary_contact: "erh+git@nimenees.com"
 auto_ccs:
   - "chriswwolfe@gmail.com"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
+sanitizers:
+  - address
+  - undefined
+  - dataflow
 architectures:
   - x86_64
   - i386
diff --git a/projects/knot-dns/Dockerfile b/projects/knot-dns/Dockerfile
index 22b1039..4977181 100644
--- a/projects/knot-dns/Dockerfile
+++ b/projects/knot-dns/Dockerfile
@@ -28,7 +28,9 @@
  make \
  pkg-config \
  texinfo \
- wget
+ wget \
+ libev4 \
+ libev-dev
 
 ENV GNULIB_TOOL $SRC/gnulib/gnulib-tool
 RUN git clone git://git.savannah.gnu.org/gnulib.git
diff --git a/projects/kubernetes/project.yaml b/projects/kubernetes/project.yaml
index 695a571..e834026 100644
--- a/projects/kubernetes/project.yaml
+++ b/projects/kubernetes/project.yaml
@@ -4,3 +4,4 @@
   - libfuzzer
 sanitizers:
   - address
+language: go
diff --git a/projects/libavif/Dockerfile b/projects/libavif/Dockerfile
new file mode 100644
index 0000000..a5e48a9
--- /dev/null
+++ b/projects/libavif/Dockerfile
@@ -0,0 +1,29 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+FROM gcr.io/oss-fuzz-base/base-builder
+MAINTAINER joedrago@gmail.com
+
+ADD bionic.list /etc/apt/sources.list.d/bionic.list
+ADD nasm_apt.pin /etc/apt/preferences
+
+RUN apt-get update && \
+    apt-get install --no-install-recommends -y curl python3-pip python3-setuptools python3-wheel cmake nasm git && \
+    pip3 install meson ninja
+
+RUN git clone --depth 1 https://github.com/AOMediaCodec/libavif.git libavif
+WORKDIR libavif
+COPY build.sh avif_decode_fuzzer.cc avif_decode_seed_corpus.zip $SRC/
diff --git a/projects/libavif/avif_decode_fuzzer.cc b/projects/libavif/avif_decode_fuzzer.cc
new file mode 100644
index 0000000..5747367
--- /dev/null
+++ b/projects/libavif/avif_decode_fuzzer.cc
@@ -0,0 +1,65 @@
+// Copyright 2020 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+//###############################################################################
+
+#include "avif/avif.h"
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
+  avifROData raw;
+  raw.data = Data;
+  raw.size = Size;
+
+  avifDecoder *decoder = avifDecoderCreate();
+  // avifDecoderSetSource(decoder, AVIF_DECODER_SOURCE_PRIMARY_ITEM);
+  avifResult result = avifDecoderParse(decoder, &raw);
+  if (result == AVIF_RESULT_OK) {
+    // printf("AVIF container reports dimensions: %ux%u (@ %u bpc)\n",
+    //        decoder->containerWidth, decoder->containerHeight,
+    //        decoder->containerDepth);
+    for (int loop = 0; loop < 2; ++loop) {
+      // printf("Image decoded: %s\n", inputFilename);
+      // printf(" * %2.2f seconds, %d images\n", decoder->duration,
+      //        decoder->imageCount);
+      int frameIndex = 0;
+      while (avifDecoderNextImage(decoder) == AVIF_RESULT_OK) {
+        // printf("  * Decoded frame [%d] [pts %2.2f] [duration %2.2f] "
+        //        "[keyframe:%s nearest:%u]: %dx%d\n",
+        //        frameIndex, decoder->imageTiming.pts,
+        //        decoder->imageTiming.duration,
+        //        avifDecoderIsKeyframe(decoder, frameIndex) ? "true" : "false",
+        //        avifDecoderNearestKeyframe(decoder, frameIndex),
+        //        decoder->image->width, decoder->image->height);
+        ++frameIndex;
+      }
+
+      if (loop != 1) {
+        result = avifDecoderReset(decoder);
+        if (result == AVIF_RESULT_OK) {
+          // printf("Decoder reset! Decoding one more time.\n");
+        } else {
+          // printf("ERROR: Failed to reset decode: %s\n",
+          //        avifResultToString(result));
+          break;
+        }
+      }
+    }
+  } else {
+    // printf("ERROR: Failed to decode image: %s\n",
+    // avifResultToString(result));
+  }
+
+  avifDecoderDestroy(decoder);
+  return 0; // Non-zero return values are reserved for future use.
+}
diff --git a/projects/libavif/avif_decode_seed_corpus.zip b/projects/libavif/avif_decode_seed_corpus.zip
new file mode 100644
index 0000000..eb04c20
--- /dev/null
+++ b/projects/libavif/avif_decode_seed_corpus.zip
Binary files differ
diff --git a/projects/libavif/bionic.list b/projects/libavif/bionic.list
new file mode 100644
index 0000000..8621803
--- /dev/null
+++ b/projects/libavif/bionic.list
@@ -0,0 +1,2 @@
+# use nasm 2.13.02 from bionic
+deb http://archive.ubuntu.com/ubuntu/ bionic universe
diff --git a/projects/libavif/build.sh b/projects/libavif/build.sh
new file mode 100755
index 0000000..bf2bf4c
--- /dev/null
+++ b/projects/libavif/build.sh
@@ -0,0 +1,36 @@
+#!/bin/bash -eu
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# build dav1d
+cd ext && bash dav1d.cmd && cd ..
+
+# build libavif
+mkdir build
+cd build
+cmake -G Ninja -DBUILD_SHARED_LIBS=0 -DAVIF_CODEC_DAV1D=1 -DAVIF_LOCAL_DAV1D=1 ..
+ninja
+
+# build fuzzer
+$CXX $CXXFLAGS -std=c++11 -I../include \
+    $SRC/avif_decode_fuzzer.cc -o $OUT/avif_decode_fuzzer \
+    $LIB_FUZZING_ENGINE libavif.a ../ext/dav1d/build/src/libdav1d.a
+
+# copy seed corpus
+cp $SRC/avif_decode_seed_corpus.zip $OUT/
+
+# show contents of $OUT/ for sanity checking
+find $OUT/
diff --git a/projects/libavif/nasm_apt.pin b/projects/libavif/nasm_apt.pin
new file mode 100644
index 0000000..6909902
--- /dev/null
+++ b/projects/libavif/nasm_apt.pin
@@ -0,0 +1,7 @@
+Package: *
+Pin: release n=bionic
+Pin-Priority: 1
+
+Package: nasm
+Pin: release n=bionic
+Pin-Priority: 555
diff --git a/projects/libavif/project.yaml b/projects/libavif/project.yaml
new file mode 100644
index 0000000..60816fa
--- /dev/null
+++ b/projects/libavif/project.yaml
@@ -0,0 +1,2 @@
+homepage: "https://github.com/AOMediaCodec/libavif"
+primary_contact: "joedrago@gmail.com"
diff --git a/projects/libexif/exif_loader_fuzzer.cc b/projects/libexif/exif_loader_fuzzer.cc
index 7c32c9c..98365b7 100644
--- a/projects/libexif/exif_loader_fuzzer.cc
+++ b/projects/libexif/exif_loader_fuzzer.cc
@@ -12,6 +12,33 @@
   exif_content_foreach_entry(content, content_func, NULL);
 }
 
+static void
+test_exif_data (ExifData *d) {
+  unsigned int i, c;
+  char v[1024], *p;
+  ExifMnoteData *md;
+
+  md = exif_data_get_mnote_data (d);
+  if (!md) {
+    return;
+  }
+
+  exif_mnote_data_ref (md);
+  exif_mnote_data_unref (md);
+
+  c = exif_mnote_data_count (md);
+  for (i = 0; i < c; i++) {
+    const char *name = exif_mnote_data_get_name (md, i);
+    if (!name) {
+      break;
+    }
+    exif_mnote_data_get_title (md, i);
+    exif_mnote_data_get_description (md, i);
+    exif_mnote_data_get_value (md, i, v, sizeof (v));
+  }
+}
+
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
   ExifLoader *loader = exif_loader_new();
   ExifData *exif_data;
@@ -25,6 +52,7 @@
     return 0;
   }
   exif_data_foreach_content(exif_data, data_func, NULL);
+  test_exif_data (exif_data);
   exif_loader_unref(loader);
   exif_data_unref(exif_data);
   return 0;
diff --git a/projects/libexif/project.yaml b/projects/libexif/project.yaml
index 4ccd24c..72f2d93 100644
--- a/projects/libexif/project.yaml
+++ b/projects/libexif/project.yaml
@@ -2,6 +2,7 @@
 primary_contact: "dan@coneharvesters.com"
 auto_ccs:
   - paul.l.kehrer@gmail.com
+  - marcus@jet.franken.de
 fuzzing_engines:
   - libfuzzer
   - afl
diff --git a/projects/libpcap/project.yaml b/projects/libpcap/project.yaml
index b0a25ee..c7af8df 100644
--- a/projects/libpcap/project.yaml
+++ b/projects/libpcap/project.yaml
@@ -1,11 +1,16 @@
 homepage: "https://www.tcpdump.org"
 primary_contact: "security@tcpdump.org"
 auto_ccs :
-- "p.antoine@catenacyber.fr"
-- "infra.station@gmail.com"
-- "guy@alum.mit.edu"
-
+  - "p.antoine@catenacyber.fr"
+  - "infra.station@gmail.com"
+  - "guy@alum.mit.edu"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
-- address
-- memory
-- undefined
+  - address
+  - memory
+  - undefined
+  - dataflow
diff --git a/projects/libplist/project.yaml b/projects/libplist/project.yaml
index 9fa0e1f..6bbfd76 100644
--- a/projects/libplist/project.yaml
+++ b/projects/libplist/project.yaml
@@ -2,7 +2,13 @@
 primary_contact: "nikias.bassen@gmail.com"
 auto_ccs:
   - "nikias@gmx.li"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
   - address
   - memory
   - undefined
+  - dataflow
diff --git a/projects/libwebp/project.yaml b/projects/libwebp/project.yaml
index 1a3c383..16c58b3 100644
--- a/projects/libwebp/project.yaml
+++ b/projects/libwebp/project.yaml
@@ -1,14 +1,20 @@
 homepage: "https://developers.google.com/speed/webp/"
 primary_contact: "jzern@google.com"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
-- address
-- undefined
-- memory
+  - address
+  - undefined
+  - memory
+  - dataflow
 auto_ccs:
-- pascal.massimino@gmail.com
-- vrabaud@google.com
-- yguyon@google.com
+  - pascal.massimino@gmail.com
+  - vrabaud@google.com
+  - yguyon@google.com
 vendor_ccs:
-- aosmond@mozilla.com
-- tnikkel@mozilla.com
-- twsmith@mozilla.com
+  - aosmond@mozilla.com
+  - tnikkel@mozilla.com
+  - twsmith@mozilla.com
diff --git a/projects/mtail/project.yaml b/projects/mtail/project.yaml
index 3878cca..af7d2d4 100644
--- a/projects/mtail/project.yaml
+++ b/projects/mtail/project.yaml
@@ -4,3 +4,4 @@
   - libfuzzer
 sanitizers:
   - address
+language: go
diff --git a/projects/mupdf/project.yaml b/projects/mupdf/project.yaml
index 7cc9ab3..a68aec1 100644
--- a/projects/mupdf/project.yaml
+++ b/projects/mupdf/project.yaml
@@ -1,8 +1,14 @@
 homepage: "https://www.mupdf.com"
 primary_contact: tor.andersson@artifex.com
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
   - address
   - memory
+  - dataflow
 auto_ccs:
   - jonathan@titanous.com
   - sebastian.rasmussen@artifex.com
diff --git a/projects/myanmar-tools/Dockerfile b/projects/myanmar-tools/Dockerfile
index 42d1adc..90f94c6 100644
--- a/projects/myanmar-tools/Dockerfile
+++ b/projects/myanmar-tools/Dockerfile
@@ -17,7 +17,8 @@
 
 RUN apt-get update && apt-get -y install \
     build-essential \
-    cmake
+    cmake \
+    libunwind-dev
 RUN git clone https://github.com/google/myanmar-tools.git
 WORKDIR $SRC/myanmar-tools/clients/cpp/
 COPY build.sh $SRC/
diff --git a/projects/mysql-server/fix.diff b/projects/mysql-server/fix.diff
index 562d1b3..7e3e171 100644
--- a/projects/mysql-server/fix.diff
+++ b/projects/mysql-server/fix.diff
@@ -1,8 +1,8 @@
 diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 17939f7c6f4..e05deb5911e 100644
+index ce1d1bb05b5..d1d0b04f202 100644
 --- a/CMakeLists.txt
 +++ b/CMakeLists.txt
-@@ -517,6 +517,7 @@ IF(WITH_JEMALLOC)
+@@ -528,6 +528,7 @@ IF(WITH_JEMALLOC)
    STRING_APPEND(CMAKE_CXX_FLAGS " -fno-builtin-realloc -fno-builtin-free")
  ENDIF()
  
@@ -10,8 +10,8 @@
  OPTION(ENABLED_PROFILING "Enable profiling" ON)
  OPTION(WITHOUT_SERVER OFF)
  IF(UNIX)
-@@ -1324,6 +1325,10 @@ IF(NOT WITHOUT_SERVER)
-   ADD_SUBDIRECTORY(sql)
+@@ -1348,6 +1349,10 @@ IF(NOT WITHOUT_SERVER AND WITH_UNIT_TESTS)
+   TARGET_LINK_LIBRARIES(server_unittest_library ${ICU_LIBRARIES})
  ENDIF()
  
 +IF (FUZZING)
@@ -36,10 +36,10 @@
  
  enum mysql_ssl_mode {
 diff --git a/include/violite.h b/include/violite.h
-index 9f9d6e62e2e..32bed2eeb30 100644
+index 76f2ed2017a..56900e11349 100644
 --- a/include/violite.h
 +++ b/include/violite.h
-@@ -106,12 +106,14 @@ enum enum_vio_type : int {
+@@ -108,12 +108,14 @@ enum enum_vio_type : int {
    */
    VIO_TYPE_PLUGIN = 7,
  
@@ -55,7 +55,7 @@
  };
  
  /**
-@@ -449,4 +451,20 @@ struct Vio {
+@@ -450,4 +452,20 @@ struct Vio {
  #define SSL_handle void *
  #endif
  
@@ -77,11 +77,11 @@
 +
  #endif /* vio_violite_h_ */
 diff --git a/libmysql/CMakeLists.txt b/libmysql/CMakeLists.txt
-index 52b9b61271f..e452fc202a5 100644
+index 0979a2b7b8c..0c896297a9f 100644
 --- a/libmysql/CMakeLists.txt
 +++ b/libmysql/CMakeLists.txt
-@@ -320,7 +320,7 @@ IF(UNIX)
-   ENDIF()
+@@ -324,7 +324,7 @@ IF(UNIX)
+   ADD_INSTALL_RPATH_FOR_OPENSSL(libmysql)
  
    GET_TARGET_PROPERTY(libmysql_link_flags libmysql LINK_FLAGS)
 -  IF(LINK_FLAG_NO_UNDEFINED)
@@ -127,10 +127,10 @@
      *failed = true;
      return 0;
 diff --git a/sql-common/client.cc b/sql-common/client.cc
-index f5e760cc37d..09037a9e236 100644
+index fd36e9950cf..c8cae8c3cbf 100644
 --- a/sql-common/client.cc
 +++ b/sql-common/client.cc
-@@ -5850,6 +5850,12 @@ static mysql_state_machine_status csm_begin_connect(mysql_async_connect *ctx) {
+@@ -5852,6 +5852,12 @@ static mysql_state_machine_status csm_begin_connect(mysql_async_connect *ctx) {
      }
    }
  #endif /* _WIN32 */
@@ -144,10 +144,10 @@
    if (!net->vio &&
        (!mysql->options.protocol ||
 diff --git a/sql/mysqld.cc b/sql/mysqld.cc
-index 178a572a5aa..03b9d6346f9 100644
+index c30315d4702..4413d95915d 100644
 --- a/sql/mysqld.cc
 +++ b/sql/mysqld.cc
-@@ -6353,7 +6353,9 @@ int mysqld_main(int argc, char **argv)
+@@ -6395,7 +6395,9 @@ int mysqld_main(int argc, char **argv)
      unireg_abort(MYSQLD_ABORT_EXIT);  // Will do exit
    }
  
@@ -157,7 +157,7 @@
  
    size_t guardize = 0;
  #ifndef _WIN32
-@@ -6837,8 +6839,10 @@ int mysqld_main(int argc, char **argv)
+@@ -6879,8 +6881,10 @@ int mysqld_main(int argc, char **argv)
      unireg_abort(MYSQLD_ABORT_EXIT);
  
  #ifndef _WIN32
@@ -168,7 +168,7 @@
  #endif
  
    /* set all persistent options */
-@@ -6980,8 +6984,9 @@ int mysqld_main(int argc, char **argv)
+@@ -7022,8 +7026,9 @@ int mysqld_main(int argc, char **argv)
    }
  
    start_handle_manager();
@@ -179,7 +179,7 @@
  
    LogEvent()
        .type(LOG_TYPE_ERROR)
-@@ -7028,6 +7033,10 @@ int mysqld_main(int argc, char **argv)
+@@ -7070,6 +7075,10 @@ int mysqld_main(int argc, char **argv)
  
    (void)RUN_HOOK(server_state, before_handle_connection, (NULL));
  
@@ -190,7 +190,7 @@
  #if defined(_WIN32)
    setup_conn_event_handler_threads();
  #else
-@@ -9850,6 +9859,9 @@ static int get_options(int *argc_ptr, char ***argv_ptr) {
+@@ -9895,6 +9904,9 @@ static int get_options(int *argc_ptr, char ***argv_ptr) {
  
    if (opt_short_log_format) opt_specialflag |= SPECIAL_SHORT_LOG_FORMAT;
  
@@ -226,10 +226,10 @@
          err = errs[id];
        }
 diff --git a/vio/CMakeLists.txt b/vio/CMakeLists.txt
-index 497ab98396c..a6cf2a647a6 100644
+index d44eebce63a..975bc878e17 100644
 --- a/vio/CMakeLists.txt
 +++ b/vio/CMakeLists.txt
-@@ -25,6 +25,7 @@ SET(VIO_SOURCES
+@@ -27,6 +27,7 @@ SET(VIO_SOURCES
    viosocket.cc
    viossl.cc
    viosslfactories.cc
@@ -238,10 +238,10 @@
  
  IF(WIN32)
 diff --git a/vio/vio.cc b/vio/vio.cc
-index 85cc77df645..03ed154dcee 100644
+index f2007bbc928..3b2ca196ec5 100644
 --- a/vio/vio.cc
 +++ b/vio/vio.cc
-@@ -300,6 +300,27 @@ static bool vio_init(Vio *vio, enum enum_vio_type type, my_socket sd,
+@@ -301,6 +301,27 @@ static bool vio_init(Vio *vio, enum enum_vio_type type, my_socket sd,
      return false;
    }
  #endif /* HAVE_OPENSSL */
@@ -269,7 +269,7 @@
    vio->viodelete = vio_delete;
    vio->vioerrno = vio_errno;
    vio->read = vio->read_buffer ? vio_read_buff : vio_read;
-@@ -575,7 +596,8 @@ static const vio_string vio_type_names[] = {{"", 0},
+@@ -576,7 +597,8 @@ static const vio_string vio_type_names[] = {{"", 0},
                                              {STRING_WITH_LEN("SSL/TLS")},
                                              {STRING_WITH_LEN("Shared Memory")},
                                              {STRING_WITH_LEN("Internal")},
@@ -281,10 +281,10 @@
                         int *len) {
 diff --git a/vio/viofuzz.cc b/vio/viofuzz.cc
 new file mode 100644
-index 00000000000..73f29662b96
+index 00000000000..83f22a5dbb9
 --- /dev/null
 +++ b/vio/viofuzz.cc
-@@ -0,0 +1,127 @@
+@@ -0,0 +1,124 @@
 +
 +#include "my_config.h"
 +
@@ -331,16 +331,13 @@
 +
 +bool vio_connect_fuzz(Vio *vio, struct sockaddr *addr, socklen_t len,
 +                        int timeout) {
-+  int ret;
 +  DBUG_ENTER("vio_socket_connect");
 +
 +  /* Only for socket-based transport types. */
 +  DBUG_ASSERT(vio->type == VIO_TYPE_SOCKET || vio->type == VIO_TYPE_TCPIP);
 +
 +  /* Initiate the connection. */
-+  ret=0;
-+
-+  DBUG_RETURN(MY_TEST(ret));
++  return 0;
 +}
 +
 +
diff --git a/projects/openssh/build.sh b/projects/openssh/build.sh
index 0b39dbd..6c7e9e2 100755
--- a/projects/openssh/build.sh
+++ b/projects/openssh/build.sh
@@ -27,24 +27,32 @@
 # Build fuzzers
 STATIC_CRYPTO="-Wl,-Bstatic -lcrypto -Wl,-Bdynamic"
 
+COMMON=ssh-sk-null.o
+
+$CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
+	regress/misc/fuzz-harness/ssh-sk-null.cc -c -o ssh-sk-null.o
+
 $CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
 	regress/misc/fuzz-harness/pubkey_fuzz.cc -o $OUT/pubkey_fuzz \
-	-lssh -lopenbsd-compat $STATIC_CRYPTO $LIB_FUZZING_ENGINE
+	-lssh -lopenbsd-compat $COMMON $STATIC_CRYPTO $LIB_FUZZING_ENGINE
 $CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
 	regress/misc/fuzz-harness/privkey_fuzz.cc -o $OUT/privkey_fuzz \
-	-lssh -lopenbsd-compat $STATIC_CRYPTO $LIB_FUZZING_ENGINE
+	-lssh -lopenbsd-compat $COMMON $STATIC_CRYPTO $LIB_FUZZING_ENGINE
 $CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
 	regress/misc/fuzz-harness/sig_fuzz.cc -o $OUT/sig_fuzz \
-	-lssh -lopenbsd-compat $STATIC_CRYPTO $LIB_FUZZING_ENGINE
+	-lssh -lopenbsd-compat $COMMON $STATIC_CRYPTO $LIB_FUZZING_ENGINE
 $CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
 	regress/misc/fuzz-harness/authopt_fuzz.cc -o $OUT/authopt_fuzz \
-	auth-options.o -lssh -lopenbsd-compat $STATIC_CRYPTO $LIB_FUZZING_ENGINE
+	auth-options.o -lssh -lopenbsd-compat $COMMON $STATIC_CRYPTO \
+	$LIB_FUZZING_ENGINE
 $CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
 	regress/misc/fuzz-harness/sshsig_fuzz.cc -o $OUT/sshsig_fuzz \
-	sshsig.o -lssh -lopenbsd-compat $STATIC_CRYPTO $LIB_FUZZING_ENGINE
+	sshsig.o -lssh -lopenbsd-compat $COMMON $STATIC_CRYPTO \
+	$LIB_FUZZING_ENGINE
 $CXX $CXXFLAGS -std=c++11 -I. -L. -Lopenbsd-compat -g \
 	regress/misc/fuzz-harness/sshsigopt_fuzz.cc -o $OUT/sshsigopt_fuzz \
-	sshsig.o -lssh -lopenbsd-compat $STATIC_CRYPTO $LIB_FUZZING_ENGINE
+	sshsig.o -lssh -lopenbsd-compat $COMMON $STATIC_CRYPTO \
+	$LIB_FUZZING_ENGINE
 
 # Prepare seed corpora
 CASES="$SRC/openssh-fuzz-cases"
diff --git a/projects/openthread/project.yaml b/projects/openthread/project.yaml
index 48ba90c..5709f4b 100644
--- a/projects/openthread/project.yaml
+++ b/projects/openthread/project.yaml
@@ -1,2 +1,11 @@
 homepage: "https://github.com/openthread/openthread"
 primary_contact: "jonhui@google.com"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
+sanitizers:
+  - address
+  - undefined
+  - dataflow
diff --git a/projects/openvswitch/build.sh b/projects/openvswitch/build.sh
index 6cd1b2b..350f018 100755
--- a/projects/openvswitch/build.sh
+++ b/projects/openvswitch/build.sh
@@ -15,7 +15,7 @@
 #
 ################################################################################
 
-./boot.sh && ./configure && make -j$(nproc) && make oss-fuzz-targets
+./boot.sh && HAVE_UNWIND=no ./configure --enable-ndebug && make -j$(nproc) && make oss-fuzz-targets
 
 cp $SRC/openvswitch/tests/oss-fuzz/config/*.options $OUT/
 cp $SRC/openvswitch/tests/oss-fuzz/config/*.dict $OUT/
diff --git a/projects/osquery/Dockerfile b/projects/osquery/Dockerfile
index 35ae8a3..58e73df 100755
--- a/projects/osquery/Dockerfile
+++ b/projects/osquery/Dockerfile
@@ -17,7 +17,7 @@
 FROM gcr.io/oss-fuzz-base/base-builder
 MAINTAINER theopolis@osquery.io
 RUN apt-get update
-RUN apt-get install -y --no-install-recommends python python3 bison flex make wget xz-utils
+RUN apt-get install -y --no-install-recommends python python3 bison flex make wget xz-utils libunwind-dev
 
 # Install specific git version.
 RUN export GIT_VER=2.21.0 \
@@ -36,11 +36,6 @@
  && tar xf cmake-3.14.6-Linux-x86_64.tar.gz -C /usr/local --strip 1 \
  && rm cmake-3.14.6-Linux-x86_64.tar.gz
 
-# Install build toolchain
-RUN wget https://github.com/osquery/osquery-toolchain/releases/download/1.0.0/osquery-toolchain-1.0.0.tar.xz \
- && tar xf osquery-toolchain-1.0.0.tar.xz -C /usr/local \
- && rm osquery-toolchain-1.0.0.tar.xz
-
 RUN git clone --depth 1 https://github.com/osquery/osquery osquery
 
 WORKDIR osquery
diff --git a/projects/osquery/build.sh b/projects/osquery/build.sh
index 3815a6d..b5b31d6 100755
--- a/projects/osquery/build.sh
+++ b/projects/osquery/build.sh
@@ -24,19 +24,20 @@
   mv "${SRC}/${PROJECT}-dev" "${SRC}/${PROJECT}" )
 
 pushd "${SRC}/${PROJECT}"
-mkdir build && pushd build
 
-export CXXFLAGS="${CXXFLAGS} -Wl,-lunwind -Wl,-lc++abi"
-export CFLAGS="${CFLAGS} -Wl,-lunwind"
+# Prefer shared libs
+sed -i 's/CMAKE_LINK_SEARCH_START_STATIC ON/CMAKE_LINK_SEARCH_START_STATIC OFF/g' cmake/flags.cmake
+sed -i 's/CMAKE_LINK_SEARCH_END_STATIC ON/CMAKE_LINK_SEARCH_END_STATIC OFF/g' cmake/flags.cmake
+
+mkdir build && pushd build
 
 cmake \
   -DOSQUERY_VERSION:string=0.0.0-fuzz \
   -DOSQUERY_ENABLE_ADDRESS_SANITIZER:BOOL=ON \
   -DOSQUERY_ENABLE_FUZZER_SANITIZERS:BOOL=ON \
-  -DOSQUERY_TOOLCHAIN_SYSROOT=/usr/local/osquery-toolchain \
   ..
 cmake \
-  -DCMAKE_EXE_LINKER_FLAGS=${LIB_FUZZING_ENGINE} \
+  "-DCMAKE_EXE_LINKER_FLAGS=${LIB_FUZZING_ENGINE} -Wl,-rpath,'\$ORIGIN/lib'" \
   ..
 
 # Build harnesses
@@ -48,6 +49,10 @@
 rm -rf "${SRC}/${PROJECT}/libraries/cmake/source/libudev/src/test"
 rm -rf libs/src/patched-source/libudev/src/test
 
+# Move libunwind to output path
+mkdir -p "${OUT}/lib"
+cp /usr/lib/x86_64-linux-gnu/libunwind.so.8 "${OUT}/lib"
+
 # Move harnesses to output path
 cp osquery/main/harnesses/osqueryfuzz-config "${OUT}/osqueryfuzz-config"
 cp osquery/main/harnesses/osqueryfuzz-sqlquery "${OUT}/osqueryfuzz-sqlquery"
@@ -57,4 +62,4 @@
 tools/harnesses/osqueryfuzz_config_corpus.sh "${OUT}/osqueryfuzz-config_seed_corpus.zip"
 tools/harnesses/osqueryfuzz_config_dict.sh "${OUT}/osqueryfuzz-config.dict"
 tools/harnesses/osqueryfuzz_sqlquery_corpus.sh "${OUT}/osqueryfuzz-sqlquery_seed_corpus.zip"
-cp tools/harnesses/osqueryfuzz_sqlquery.dict "${OUT}/osqueryfuzz-sqlquery.dict"
\ No newline at end of file
+cp tools/harnesses/osqueryfuzz_sqlquery.dict "${OUT}/osqueryfuzz-sqlquery.dict"
diff --git a/projects/ots/Dockerfile b/projects/ots/Dockerfile
index c94408a..d86342c 100644
--- a/projects/ots/Dockerfile
+++ b/projects/ots/Dockerfile
@@ -17,7 +17,7 @@
 FROM gcr.io/oss-fuzz-base/base-builder
 MAINTAINER mmoroz@chromium.org
 RUN apt-get update && apt-get install -y python3-pip pkg-config zlib1g-dev && \
-    pip3 install meson ninja
+    pip3 install meson==0.52.0 ninja
 RUN git clone --depth 1 https://github.com/khaledhosny/ots.git
 WORKDIR ots
 RUN git submodule update --init --recursive
diff --git a/projects/pcre2/project.yaml b/projects/pcre2/project.yaml
index 18fcf64..fbe4a4c 100644
--- a/projects/pcre2/project.yaml
+++ b/projects/pcre2/project.yaml
@@ -1,9 +1,15 @@
 homepage: "http://www.pcre.org/"
 primary_contact: "philip.hazel@gmail.com"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
   - address
   - memory
   - undefined
+  - dataflow
 architectures:
   - x86_64
   - i386
diff --git a/projects/pillow/Dockerfile b/projects/pillow/Dockerfile
new file mode 100644
index 0000000..aa67d85
--- /dev/null
+++ b/projects/pillow/Dockerfile
@@ -0,0 +1,24 @@
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+FROM gcr.io/oss-fuzz-base/base-builder
+MAINTAINER guidovranken@gmail.com
+RUN apt-get update && apt-get install -y make autoconf automake build-essential libbz2-dev libc6-dev libffi-dev libfreetype6-dev libgdbm-dev libjpeg-turbo8-dev liblcms2-dev libncursesw5-dev libreadline-dev libsqlite3-dev libssl-dev libtiff5-dev libtool libwebp-dev make python python-dev python-setuptools tk-dev wget zlib1g-dev libwebp-dev
+RUN wget https://github.com/python/cpython/archive/v3.8.1.tar.gz
+RUN git clone --depth 1 https://github.com/python-pillow/Pillow.git pillow
+RUN git clone --depth 1 https://github.com/guidovranken/oss-fuzz-fuzzers
+WORKDIR pillow
+COPY build.sh $SRC/
diff --git a/projects/pillow/build.sh b/projects/pillow/build.sh
new file mode 100755
index 0000000..1bee1d7
--- /dev/null
+++ b/projects/pillow/build.sh
@@ -0,0 +1,112 @@
+#!/bin/bash -eu
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Because Pillow's "./setup.py build_ext --inplace" does not work with custom CC and CFLAGS,
+# it is necessary to build in the following manner:
+#
+# Build CPython without instrumentation/sanitization
+# Build Pillow in a virtualenv based on uninstrumented and unsanitized CPython. Log the build steps to build.sh
+# Build CPython with instrumentation/sanitization
+# Rewrite build.sh to compile Pillow based on CPython with instrumentation/sanitization
+#
+# Why not build Pillow directly with a virtualenv based on instrumented CPython?
+# Because the virtualenv will inherit CC and CFLAGS of the instrumented CPython, and that will fail.
+
+cd $SRC/
+tar zxf v3.8.1.tar.gz
+cd cpython-3.8.1/
+
+# Ignore memory leaks from python scripts invoked in the build
+export ASAN_OPTIONS="detect_leaks=0"
+export MSAN_OPTIONS="halt_on_error=0:exitcode=0:report_umrs=0"
+
+# Remove -pthread from CFLAGS, this trips up ./configure
+# which thinks pthreads are available without any CLI flags
+CFLAGS=${CFLAGS//"-pthread"/}
+
+FLAGS=()
+case $SANITIZER in
+  address)
+    FLAGS+=("--with-address-sanitizer")
+    ;;
+  memory)
+    FLAGS+=("--with-memory-sanitizer")
+    # installing ensurepip takes a while with MSAN instrumentation, so
+    # we disable it here
+    FLAGS+=("--without-ensurepip")
+    # -msan-keep-going is needed to allow MSAN's halt_on_error to function
+    FLAGS+=("CFLAGS=-mllvm -msan-keep-going=1")
+    ;;
+  undefined)
+    FLAGS+=("--with-undefined-behavior-sanitizer")
+    ;;
+esac
+
+export CPYTHON_INSTALL_PATH=$OUT/cpython-install
+rm -rf $CPYTHON_INSTALL_PATH
+mkdir $CPYTHON_INSTALL_PATH
+
+export CPYTHON_UNINSTRUMENTED_INSTALL_PATH=$OUT/cpython-install
+rm -rf $CPYTHON_UNINSTRUMENTED_INSTALL_PATH
+mkdir $CPYTHON_UNINSTRUMENTED_INSTALL_PATH
+
+cd $SRC/
+tar zxf v3.8.1.tar.gz
+
+# Compile uninstrumented CPython
+cp -R $SRC/cpython-3.8.1/ $SRC/cpython-3.8.1-uninstrumented
+cd $SRC/cpython-3.8.1-uninstrumented
+CFLAGS="" CXXFLAGS="" ./configure --prefix=$CPYTHON_UNINSTRUMENTED_INSTALL_PATH
+CFLAGS="" CXXFLAGS="" make -j$(nproc)
+CFLAGS="" CXXFLAGS="" make install
+
+# Compile instrumented CPython
+cd $SRC/cpython-3.8.1/
+cp $SRC/oss-fuzz-fuzzers/pillow/python_coverage.h Python/
+
+# Patch the interpreter to record code coverage
+sed -i '1 s/^.*$/#include "python_coverage.h"/g' Python/ceval.c
+sed -i 's/case TARGET\(.*\): {/\0\nfuzzer_record_code_coverage(f->f_code, f->f_lasti);/g' Python/ceval.c
+
+./configure "${FLAGS[@]}" --prefix=$CPYTHON_INSTALL_PATH
+make -j$(nproc)
+make install
+
+# Compile Pillow fuzzers
+cd $SRC/oss-fuzz-fuzzers/pillow
+rm $CPYTHON_INSTALL_PATH/lib/python3.8/lib-dynload/_tkinter*.so
+make
+cp $SRC/oss-fuzz-fuzzers/pillow/fuzzer-loadimg $OUT/
+cp $SRC/oss-fuzz-fuzzers/pillow/loadimg.py $OUT/
+
+# Create venv for Pillow compilation
+$CPYTHON_UNINSTRUMENTED_INSTALL_PATH/bin/python3 -m venv $SRC/venv
+source $SRC/venv/bin/activate
+
+# Compile Pillow
+cd $SRC/pillow
+CFLAGS="" CXXFLAGS="" ./setup.py build_ext --inplace >build.sh
+grep "^\(gcc\|x86_64-linux-gnu-gcc\|clang\) " build.sh | sed 's/^\(gcc\|x86_64-linux-gnu-gcc\|clang\) /$CC $CFLAGS /g' | sed 's/-DPILLOW_VERSION="\([^"]\+\)"/-DPILLOW_VERSION="\\"\1\\""/g' >build2.sh
+bash build2.sh
+cp -R $SRC/pillow $OUT/
+cp /usr/lib/x86_64-linux-gnu/libjpeg.so.8 $OUT/
+cp /usr/lib/x86_64-linux-gnu/libtiff.so.5 $OUT/
+cp /usr/lib/x86_64-linux-gnu/libjbig.so.0 $OUT/
+cp /usr/lib/x86_64-linux-gnu/libwebp.so.5 $OUT/
+cp /usr/lib/x86_64-linux-gnu/libwebpmux.so.1 $OUT/
+cp /usr/lib/x86_64-linux-gnu/libwebpdemux.so.1 $OUT/
+cp $SRC/oss-fuzz-fuzzers/pillow/corpus.zip $OUT/fuzzer-loadimg_seed_corpus.zip
diff --git a/projects/pillow/project.yaml b/projects/pillow/project.yaml
new file mode 100644
index 0000000..f6041e9
--- /dev/null
+++ b/projects/pillow/project.yaml
@@ -0,0 +1,11 @@
+homepage: "https://python-pillow.org/"
+primary_contact: "guidovranken@gmail.com"
+auto_ccs:
+ - "security@python-pillow.org"
+sanitizers:
+ - address
+ - undefined
+architectures:
+ - x86_64
+fuzzing_engines:
+  - libfuzzer
diff --git a/projects/proj4/Dockerfile b/projects/proj4/Dockerfile
index 12235d2..6e9ca2c 100644
--- a/projects/proj4/Dockerfile
+++ b/projects/proj4/Dockerfile
@@ -16,7 +16,16 @@
 
 FROM gcr.io/oss-fuzz-base/base-builder
 MAINTAINER even.rouault@spatialys.com
-RUN apt-get update && apt-get install -y make autoconf automake libtool g++ sqlite3 libsqlite3-dev pkg-config
-RUN git clone --depth 1 https://github.com/OSGeo/proj.4 proj.4
-WORKDIR proj.4
-COPY build.sh $SRC/
+RUN dpkg --add-architecture i386 && \
+    apt-get update && \
+    apt-get install -y make autoconf automake libtool g++ sqlite3 pkg-config
+
+RUN git clone --depth 1 https://github.com/OSGeo/proj proj
+
+RUN git clone --depth 1 https://github.com/curl/curl.git proj/curl
+
+RUN git clone --depth 1 https://gitlab.com/libtiff/libtiff.git proj/libtiff
+
+WORKDIR proj
+
+RUN cp test/fuzzers/build.sh $SRC/
diff --git a/projects/proj4/project.yaml b/projects/proj4/project.yaml
index 91e991a..71bdcf8 100644
--- a/projects/proj4/project.yaml
+++ b/projects/proj4/project.yaml
@@ -1,6 +1,9 @@
-homepage: "http://proj4.org/"
+homepage: "https://proj.org/"
 primary_contact: "even.rouault@gmail.com"
 auto_ccs:
   - "hobu.inc@gmail.com"
   - "kristianevers@gmail.com"
   - "knudsen.thomas@gmail.com"
+architectures:
+  - x86_64
+  - i386
diff --git a/projects/proxygen/Dockerfile b/projects/proxygen/Dockerfile
index feb12fc..14c3da5 100644
--- a/projects/proxygen/Dockerfile
+++ b/projects/proxygen/Dockerfile
@@ -147,7 +147,8 @@
     zlib1g-dev \
     binutils-dev \
     libsodium-dev \
-    libdouble-conversion-dev
+    libdouble-conversion-dev \
+    libunwind8-dev
 
 # Install patchelf so we can fix path to libunwind
 RUN apt-get install patchelf
diff --git a/projects/qt/Dockerfile b/projects/qt/Dockerfile
new file mode 100644
index 0000000..7d325ca
--- /dev/null
+++ b/projects/qt/Dockerfile
@@ -0,0 +1,26 @@
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+FROM gcr.io/oss-fuzz-base/base-builder
+MAINTAINER rlohningqt@gmail.com
+RUN apt-get update && apt-get install -y build-essential python libxcb-xinerama0-dev && apt-get install --no-install-recommends afl-doc
+RUN git clone --branch 5.15 --depth 1 git://code.qt.io/qt/qt5.git qt
+WORKDIR qt
+RUN perl init-repository --module-subset=qtbase
+
+WORKDIR $SRC
+RUN git clone --depth 1 git://code.qt.io/qt/qtqa.git
+COPY build.sh $SRC/
diff --git a/projects/qt/build.sh b/projects/qt/build.sh
new file mode 100755
index 0000000..619cdcd
--- /dev/null
+++ b/projects/qt/build.sh
@@ -0,0 +1,65 @@
+#!/bin/bash -eu
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# add the flags to Qt build, gratefully borrowed from karchive
+cd $SRC/qt/qtbase/mkspecs
+sed -i -e "s/QMAKE_CXXFLAGS    += -stdlib=libc++/QMAKE_CXXFLAGS    += -stdlib=libc++  $CXXFLAGS\nQMAKE_CFLAGS += $CFLAGS/g" linux-clang-libc++/qmake.conf
+sed -i -e "s/QMAKE_LFLAGS      += -stdlib=libc++/QMAKE_LFLAGS      += -stdlib=libc++ -lpthread $CXXFLAGS/g" linux-clang-libc++/qmake.conf
+
+# set optimization to O1
+sed -i -e "s/QMAKE_CFLAGS_OPTIMIZE      = -O2/QMAKE_CFLAGS_OPTIMIZE      = -O1/g" common/gcc-base.conf
+sed -i -e "s/QMAKE_CFLAGS_OPTIMIZE_FULL = -O3/QMAKE_CFLAGS_OPTIMIZE_FULL = -O1/g" common/gcc-base.conf
+
+# build project
+cd $WORK
+MAKEFLAGS=-j$(nproc) $SRC/qt/configure -platform linux-clang-libc++ -static -opensource -confirm-license -no-opengl -nomake tests -nomake examples -prefix $OUT
+make -j$(nproc)
+make install
+
+# prepare corpus files
+zip -j $WORK/html $SRC/qtqa/fuzzing/testcases/html/*
+zip -j $WORK/markdown $SRC/qtqa/fuzzing/testcases/markdown/*
+zip -j $WORK/xml $SRC/qtqa/fuzzing/testcases/xml/* /usr/share/afl/testcases/others/xml/*
+
+# build fuzzers
+
+build_fuzzer() {
+    local module=$1
+    local proFilePath=$2
+    local format=${3-""}
+    local dictionary=${4-""}
+    local proFileName=${proFilePath##*/}
+    local exeName=${proFileName%%.*}
+    mkdir build_fuzzer
+    cd build_fuzzer
+    $OUT/bin/qmake $SRC/qt/$module/tests/libfuzzer/$proFilePath
+    make -j$(nproc)
+    mv $exeName $OUT
+    if [ -n "$format" ]; then
+        cp $WORK/$format.zip $OUT/"$exeName"_seed_corpus.zip
+    fi
+    if [ -n "$dictionary" ]; then
+        cp $dictionary $OUT/$exeName.dict
+    fi
+    cd ..
+    rm -r build_fuzzer
+}
+
+build_fuzzer "qtbase" "corelib/serialization/qxmlstream/qxmlstreamreader/readnext/readnext.pro" "xml" "/usr/share/afl/testcases/_extras/xml.dict"
+# build_fuzzer "qtbase" "gui/text/qtextdocument/setHtml/setHtml.pro" "html" "/usr/share/afl/testcases/_extras/html_tags.dict"
+build_fuzzer "qtbase" "gui/text/qtextdocument/setMarkdown/setMarkdown.pro" "markdown"
+build_fuzzer "qtbase" "gui/text/qtextlayout/beginLayout/beginLayout.pro"
diff --git a/projects/qt/project.yaml b/projects/qt/project.yaml
index 48b81a1..960b3bf 100644
--- a/projects/qt/project.yaml
+++ b/projects/qt/project.yaml
@@ -1,2 +1,4 @@
 homepage: "http://qt-project.org"
 primary_contact: "rlohningqt@gmail.com"
+sanitizers:
+ - address
diff --git a/projects/rapidjson/project.yaml b/projects/rapidjson/project.yaml
index d86329c..573235a 100644
--- a/projects/rapidjson/project.yaml
+++ b/projects/rapidjson/project.yaml
@@ -3,8 +3,8 @@
 sanitizers:
  - address
  - undefined
- - memory
-experimental: True
+ - memory:
+    experimental: True
 architectures:
  - x86_64
  - i386
diff --git a/projects/syzkaller/project.yaml b/projects/syzkaller/project.yaml
index b208801..ee6cd84 100644
--- a/projects/syzkaller/project.yaml
+++ b/projects/syzkaller/project.yaml
@@ -4,6 +4,7 @@
   - "andreyknvl@google.com"
   - "mmoroz@chromium.org"
   - "syzkaller@googlegroups.com"
+language: go
 fuzzing_engines:
   - libfuzzer
 sanitizers:
diff --git a/projects/tesseract-ocr/Dockerfile b/projects/tesseract-ocr/Dockerfile
index e22232d..daf6070 100644
--- a/projects/tesseract-ocr/Dockerfile
+++ b/projects/tesseract-ocr/Dockerfile
@@ -20,5 +20,4 @@
 RUN git clone --depth 1 https://github.com/danbloomberg/leptonica
 RUN git clone --depth 1 https://github.com/tesseract-ocr/tesseract
 RUN git clone --depth 1 https://github.com/tesseract-ocr/tessdata
-RUN git clone https://github.com/guidovranken/tesseract-ocr-fuzzers
 COPY build.sh $SRC/
diff --git a/projects/tesseract-ocr/build.sh b/projects/tesseract-ocr/build.sh
index e64539f..1847668 100755
--- a/projects/tesseract-ocr/build.sh
+++ b/projects/tesseract-ocr/build.sh
@@ -27,17 +27,12 @@
 CXXFLAGS="$CXXFLAGS -D_GLIBCXX_DEBUG" ./configure --disable-graphics --disable-shared
 make -j$(nproc)
 
-cd $SRC/tesseract-ocr-fuzzers
-
 cp -R $SRC/tessdata $OUT
 
 $CXX $CXXFLAGS \
-    -I $SRC/tesseract/src/api \
-    -I $SRC/tesseract/src/ccstruct \
-    -I $SRC/tesseract/src/ccmain \
-    -I $SRC/tesseract/src/ccutil \
-     $SRC/tesseract-ocr-fuzzers/fuzzer-api.cpp -o $OUT/fuzzer-api \
-     $SRC/tesseract/src/api/.libs/libtesseract.a \
+    -I $SRC/tesseract/include \
+     $SRC/tesseract/unittest/fuzzers/fuzzer-api.cpp -o $OUT/fuzzer-api \
+     $SRC/tesseract/.libs/libtesseract.a \
      /usr/local/lib/liblept.a \
      /usr/lib/x86_64-linux-gnu/libtiff.a \
      /usr/lib/x86_64-linux-gnu/libpng.a \
@@ -49,13 +44,10 @@
 
 $CXX $CXXFLAGS \
     -DTESSERACT_FUZZER_WIDTH=512 \
-    -DTESSERACT_FUZZER_HEIGHT=512 \
-    -I $SRC/tesseract/src/api \
-    -I $SRC/tesseract/src/ccstruct \
-    -I $SRC/tesseract/src/ccmain \
-    -I $SRC/tesseract/src/ccutil \
-     $SRC/tesseract-ocr-fuzzers/fuzzer-api.cpp -o $OUT/fuzzer-api-512x512 \
-     $SRC/tesseract/src/api/.libs/libtesseract.a \
+    -DTESSERACT_FUZZER_HEIGHT=256 \
+    -I $SRC/tesseract/include \
+     $SRC/tesseract/unittest/fuzzers/fuzzer-api.cpp -o $OUT/fuzzer-api-512x256 \
+     $SRC/tesseract/.libs/libtesseract.a \
      /usr/local/lib/liblept.a \
      /usr/lib/x86_64-linux-gnu/libtiff.a \
      /usr/lib/x86_64-linux-gnu/libpng.a \
diff --git a/projects/tesseract-ocr/project.yaml b/projects/tesseract-ocr/project.yaml
index 70b8e3a..6e6983b 100644
--- a/projects/tesseract-ocr/project.yaml
+++ b/projects/tesseract-ocr/project.yaml
@@ -1,2 +1,4 @@
 homepage: "https://github.com/tesseract-ocr/tesseract"
 primary_contact: "stjoweil@googlemail.com"
+fuzzing_engines:
+  - libfuzzer
diff --git a/projects/tor/build.sh b/projects/tor/build.sh
index 9f0b3db..1c5154a 100644
--- a/projects/tor/build.sh
+++ b/projects/tor/build.sh
@@ -22,7 +22,7 @@
 # Build libevent with proper instrumentation.
 cd ${SRC}/libevent
 sh autogen.sh
-./configure --prefix=${TOR_DEPS}
+./configure --prefix=${TOR_DEPS} --disable-openssl
 make -j$(nproc) clean
 make -j$(nproc) all
 make install
@@ -79,6 +79,8 @@
 
     corpus_dir="${SRC}/tor-fuzz-corpora/${output#oss-fuzz-}"
     if [ -d "${corpus_dir}" ]; then
-      zip -j ${OUT}/${output}_seed_corpus.zip ${corpus_dir}/*
+      set +x
+      zip -q -j ${OUT}/${output}_seed_corpus.zip ${corpus_dir}/*
+      set -x
     fi
 done
diff --git a/projects/tpm2-tss/build.sh b/projects/tpm2-tss/build.sh
index 242787c..9e77883 100644
--- a/projects/tpm2-tss/build.sh
+++ b/projects/tpm2-tss/build.sh
@@ -31,7 +31,8 @@
   --enable-tcti-device=no \
   --enable-tcti-mssim=no \
   --disable-doxygen-doc \
-  --disable-shared
+  --disable-shared \
+  --disable-fapi
 
 sed -i 's/@DX_RULES@/# @DX_RULES@/g' Makefile
 make -j $(nproc) fuzz-targets
diff --git a/projects/unbound/Dockerfile b/projects/unbound/Dockerfile
index fe4a015..0bd7743 100644
--- a/projects/unbound/Dockerfile
+++ b/projects/unbound/Dockerfile
@@ -20,4 +20,8 @@
 RUN git clone --depth=1 https://github.com/NLnetLabs/unbound unbound
 WORKDIR unbound
 COPY parse_packet_fuzzer.c .
+COPY fuzz_1.c .
+COPY fuzz_2.c .
+COPY fuzz_3.c .
+COPY fuzz_4.c .
 COPY build.sh $SRC/
diff --git a/projects/unbound/build.sh b/projects/unbound/build.sh
index 509b694..68cda38 100755
--- a/projects/unbound/build.sh
+++ b/projects/unbound/build.sh
@@ -22,6 +22,10 @@
 make -j6 all
 
 $CC $CFLAGS -I. -DSRCDIR=. -c -o parse_packet_fuzzer.o parse_packet_fuzzer.c
+$CC $CFLAGS -I. -DSRCDIR=. -c -o fuzz_1.o fuzz_1.c
+$CC $CFLAGS -I. -DSRCDIR=. -c -o fuzz_2.o fuzz_2.c
+$CC $CFLAGS -I. -DSRCDIR=. -c -o fuzz_3.o fuzz_3.c
+$CC $CFLAGS -I. -DSRCDIR=. -c -o fuzz_4.o fuzz_4.c
 
 # get the LIBOBJS with the replaced functions needed for linking.
 LIBOBJS=`make --eval 'echolibobjs: ; @echo "$(LIBOBJS)"' echolibobjs`
@@ -46,4 +50,88 @@
   libworker.o context.o \
   $LIBOBJS
 
+$CXX $CXXFLAGS -std=c++11 \
+  $LIB_FUZZING_ENGINE \
+  -lssl -lcrypto -pthread \
+  -o $OUT/fuzz_1_fuzzer \
+  fuzz_1.o \
+  dns.o infra.o rrset.o dname.o \
+  msgencode.o as112.o msgparse.o msgreply.o packed_rrset.o iterator.o \
+  iter_delegpt.o iter_donotq.o iter_fwd.o iter_hints.o iter_priv.o \
+  iter_resptype.o iter_scrub.o iter_utils.o localzone.o mesh.o modstack.o view.o \
+  outbound_list.o alloc.o config_file.o configlexer.o configparser.o \
+  fptr_wlist.o edns.o locks.o log.o mini_event.o module.o net_help.o random.o \
+  rbtree.o regional.o rtt.o dnstree.o lookup3.o lruhash.o slabhash.o \
+  tcp_conn_limit.o timehist.o tube.o winsock_event.o autotrust.o val_anchor.o \
+  validator.o val_kcache.o val_kentry.o val_neg.o val_nsec3.o val_nsec.o \
+  val_secalgo.o val_sigcrypt.o val_utils.o dns64.o cachedb.o redis.o authzone.o \
+  respip.o netevent.o listen_dnsport.o outside_network.o ub_event.o keyraw.o \
+  sbuffer.o wire2str.o parse.o parseutil.o rrdef.o str2wire.o libunbound.o \
+  libworker.o context.o \
+  $LIBOBJS
+
+$CXX $CXXFLAGS -std=c++11 \
+  $LIB_FUZZING_ENGINE \
+  -lssl -lcrypto -pthread \
+  -o $OUT/fuzz_2_fuzzer \
+  fuzz_2.o \
+  dns.o infra.o rrset.o dname.o \
+  msgencode.o as112.o msgparse.o msgreply.o packed_rrset.o iterator.o \
+  iter_delegpt.o iter_donotq.o iter_fwd.o iter_hints.o iter_priv.o \
+  iter_resptype.o iter_scrub.o iter_utils.o localzone.o mesh.o modstack.o view.o \
+  outbound_list.o alloc.o config_file.o configlexer.o configparser.o \
+  fptr_wlist.o edns.o locks.o log.o mini_event.o module.o net_help.o random.o \
+  rbtree.o regional.o rtt.o dnstree.o lookup3.o lruhash.o slabhash.o \
+  tcp_conn_limit.o timehist.o tube.o winsock_event.o autotrust.o val_anchor.o \
+  validator.o val_kcache.o val_kentry.o val_neg.o val_nsec3.o val_nsec.o \
+  val_secalgo.o val_sigcrypt.o val_utils.o dns64.o cachedb.o redis.o authzone.o \
+  respip.o netevent.o listen_dnsport.o outside_network.o ub_event.o keyraw.o \
+  sbuffer.o wire2str.o parse.o parseutil.o rrdef.o str2wire.o libunbound.o \
+  libworker.o context.o \
+  $LIBOBJS
+
+$CXX $CXXFLAGS -std=c++11 \
+  $LIB_FUZZING_ENGINE \
+  -lssl -lcrypto -pthread \
+  -o $OUT/fuzz_3_fuzzer \
+  fuzz_3.o \
+  dns.o infra.o rrset.o dname.o \
+  msgencode.o as112.o msgparse.o msgreply.o packed_rrset.o iterator.o \
+  iter_delegpt.o iter_donotq.o iter_fwd.o iter_hints.o iter_priv.o \
+  iter_resptype.o iter_scrub.o iter_utils.o localzone.o mesh.o modstack.o view.o \
+  outbound_list.o alloc.o config_file.o configlexer.o configparser.o \
+  fptr_wlist.o edns.o locks.o log.o mini_event.o module.o net_help.o random.o \
+  rbtree.o regional.o rtt.o dnstree.o lookup3.o lruhash.o slabhash.o \
+  tcp_conn_limit.o timehist.o tube.o winsock_event.o autotrust.o val_anchor.o \
+  validator.o val_kcache.o val_kentry.o val_neg.o val_nsec3.o val_nsec.o \
+  val_secalgo.o val_sigcrypt.o val_utils.o dns64.o cachedb.o redis.o authzone.o \
+  respip.o netevent.o listen_dnsport.o outside_network.o ub_event.o keyraw.o \
+  sbuffer.o wire2str.o parse.o parseutil.o rrdef.o str2wire.o libunbound.o \
+  libworker.o context.o \
+  $LIBOBJS
+
+$CXX $CXXFLAGS -std=c++11 \
+  $LIB_FUZZING_ENGINE \
+  -lssl -lcrypto -pthread \
+  -o $OUT/fuzz_4_fuzzer \
+  fuzz_4.o \
+  dns.o infra.o rrset.o dname.o \
+  msgencode.o as112.o msgparse.o msgreply.o packed_rrset.o iterator.o \
+  iter_delegpt.o iter_donotq.o iter_fwd.o iter_hints.o iter_priv.o \
+  iter_resptype.o iter_scrub.o iter_utils.o localzone.o mesh.o modstack.o view.o \
+  outbound_list.o alloc.o config_file.o configlexer.o configparser.o \
+  fptr_wlist.o edns.o locks.o log.o mini_event.o module.o net_help.o random.o \
+  rbtree.o regional.o rtt.o dnstree.o lookup3.o lruhash.o slabhash.o \
+  tcp_conn_limit.o timehist.o tube.o winsock_event.o autotrust.o val_anchor.o \
+  validator.o val_kcache.o val_kentry.o val_neg.o val_nsec3.o val_nsec.o \
+  val_secalgo.o val_sigcrypt.o val_utils.o dns64.o cachedb.o redis.o authzone.o \
+  respip.o netevent.o listen_dnsport.o outside_network.o ub_event.o keyraw.o \
+  sbuffer.o wire2str.o parse.o parseutil.o rrdef.o str2wire.o libunbound.o \
+  libworker.o context.o \
+  $LIBOBJS
+
 wget --directory-prefix $OUT https://github.com/jsha/unbound/raw/fuzzing-corpora/testdata/parse_packet_fuzzer_seed_corpus.zip
+wget --directory-prefix $OUT https://github.com/luisx41/fuzzing-corpus/raw/master/projects/unbound/fuzz_1_fuzzer_seed_corpus.zip
+wget --directory-prefix $OUT https://github.com/luisx41/fuzzing-corpus/raw/master/projects/unbound/fuzz_2_fuzzer_seed_corpus.zip
+wget --directory-prefix $OUT https://github.com/luisx41/fuzzing-corpus/raw/master/projects/unbound/fuzz_3_fuzzer_seed_corpus.zip
+wget --directory-prefix $OUT https://github.com/luisx41/fuzzing-corpus/raw/master/projects/unbound/fuzz_4_fuzzer_seed_corpus.zip
diff --git a/projects/unbound/fuzz_1.c b/projects/unbound/fuzz_1.c
new file mode 100644
index 0000000..7fbdcc5
--- /dev/null
+++ b/projects/unbound/fuzz_1.c
@@ -0,0 +1,59 @@
+/*
+ * fuzz_1.c - parse and scrub a DNS packet supplied by libFuzzer.
+ *
+ */
+#include "config.h"
+#include "util/regional.h"
+#include "util/module.h"
+#include "util/config_file.h"
+#include "iterator/iterator.h"
+#include "iterator/iter_priv.h"
+#include "iterator/iter_scrub.h"
+#include "util/log.h"
+#include "sldns/sbuffer.h"
+
+int LLVMFuzzerTestOneInput(const uint8_t *buf, size_t len) {
+  log_init("/tmp/foo", 0, NULL);
+  char *bin = buf;
+  struct regional* reg;
+
+  struct sldns_buffer *pkt = sldns_buffer_new(1);
+  sldns_buffer_new_frm_data(pkt, bin, len);
+
+  reg = regional_create();
+
+  struct msg_parse msg;
+  struct edns_data edns;
+  memset(&msg, 0, sizeof(struct msg_parse));
+  memset(&edns, 0, sizeof(edns));
+  if (parse_packet(pkt, &msg, reg) != LDNS_RCODE_NOERROR) {    
+    goto out;
+  }
+  if (parse_extract_edns(&msg, &edns, reg) != LDNS_RCODE_NOERROR) {
+    goto out;
+  }
+
+
+  struct query_info qinfo_out;
+  memset(&qinfo_out, 0, sizeof(struct query_info));
+  qinfo_out.qname = (unsigned char *) "\03nic\02de";
+  uint8_t *peter = (unsigned char *) "\02de";   // zonename  
+  struct module_env env;
+  memset(&env, 0, sizeof(struct module_env));
+  struct config_file cfg;
+  memset(&cfg, 0, sizeof(struct config_file));
+  cfg.harden_glue = 1;    // NOTE(review): glue hardening reportedly triggers crashes; TODO investigate before relaxing
+  env.cfg = &cfg;
+
+  struct iter_env ie;
+  memset(&ie, 0, sizeof(struct iter_env));
+
+  struct iter_priv priv;
+  memset(&priv, 0, sizeof(struct iter_priv));
+  ie.priv = &priv;
+  scrub_message(pkt, &msg, &qinfo_out, peter, reg, &env, &ie);   
+out:
+  regional_destroy(reg);
+  sldns_buffer_free(pkt);
+  return 0;
+}
diff --git a/projects/unbound/fuzz_2.c b/projects/unbound/fuzz_2.c
new file mode 100644
index 0000000..baf0fee
--- /dev/null
+++ b/projects/unbound/fuzz_2.c
@@ -0,0 +1,51 @@
+#include "config.h"
+#include "sldns/sbuffer.h"
+#include "sldns/wire2str.h"
+#include "util/data/dname.h"
+
+int LLVMFuzzerTestOneInput(const uint8_t *bin, size_t nr) {
+  char *bout;
+  uint8_t *a;
+  char *b;
+  size_t bl;
+  size_t al;
+  size_t len;
+
+  if (nr > 2) {
+    len = bin[0] & 0xff;  // want random sized output buf
+    bout = malloc(len);
+    nr--;
+    bin++;
+    b = bout; bl = len; sldns_wire2str_edns_subnet_print(&b, &bl, bin, nr);
+    b = bout; bl = len; sldns_wire2str_edns_n3u_print(&b, &bl, bin, nr);
+    b = bout; bl = len; sldns_wire2str_edns_dhu_print(&b, &bl, bin, nr);
+    b = bout; bl = len; sldns_wire2str_edns_dau_print(&b, &bl, bin, nr);
+    b = bout; bl = len; sldns_wire2str_edns_nsid_print(&b, &bl, bin, nr);
+    b = bout; bl = len; sldns_wire2str_edns_ul_print(&b, &bl, bin, nr);
+    b = bout; bl = len; sldns_wire2str_edns_llq_print(&b, &bl, bin, nr); 
+  
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_tsigerror_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_long_str_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_tag_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_eui64_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_int16_data_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_hip_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_wks_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_loc_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_cert_alg_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_nsec3_salt_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_nsec_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_b32_ext_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_apl_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_str_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_rdata_unknown_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_header_scan(&a, &al, &b, &bl);
+    a = bin; al = nr; b = bout; bl = len; sldns_wire2str_pkt_scan(&a, &al, &b, &bl);
+
+    bin--;
+    free(bout);
+  }
+
+out:
+  return 0;
+}
diff --git a/projects/unbound/fuzz_3.c b/projects/unbound/fuzz_3.c
new file mode 100644
index 0000000..237a543
--- /dev/null
+++ b/projects/unbound/fuzz_3.c
@@ -0,0 +1,67 @@
+#include "config.h"
+#include "sldns/sbuffer.h"
+#include "sldns/wire2str.h"
+#include "sldns/str2wire.h"
+#include "util/data/dname.h"
+
+#define SZ 1000
+#define SZ2 100
+
+
+int LLVMFuzzerTestOneInput(const uint8_t *buf, size_t nr) {
+  char *bin = malloc(nr);
+  uint8_t *bout;
+  size_t len, len2;
+
+  memset(bin, 0, nr);
+  memcpy(bin, buf, nr);
+
+  if (nr > 2) {
+    bin[nr-1] = 0x00;  // null terminate
+    len = bin[0] & 0xff;  // want random sized output buf
+    bout = malloc(len);
+    nr--;
+    bin++;
+  
+    // call the targets  
+    len2 = len; sldns_str2wire_dname_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_int8_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_int16_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_int32_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_a_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_aaaa_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_str_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_apl_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_b64_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_b32_ext_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_hex_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_nsec_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_type_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_class_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_cert_alg_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_alg_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_tsigerror_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_time_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_tsigtime_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_period_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_loc_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_wks_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_nsap_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_atma_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_ipseckey_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_nsec3_salt_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_ilnp64_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_eui48_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_eui64_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_tag_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_long_str_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_hip_buf(bin, bout, &len2);
+    len2 = len; sldns_str2wire_int16_data_buf(bin, bout, &len2);
+
+    bin--;
+    free(bout);
+  }
+
+out:
+  free(bin); return 0;  /* LLVMFuzzerTestOneInput is declared int; a missing return is UB */
+}
diff --git a/projects/unbound/fuzz_4.c b/projects/unbound/fuzz_4.c
new file mode 100644
index 0000000..14fea49
--- /dev/null
+++ b/projects/unbound/fuzz_4.c
@@ -0,0 +1,81 @@
+/*
+ * fuzz_4.c - parse and scrub a DNS packet supplied by libFuzzer.
+ *
+ */
+#include "config.h"
+#include "util/regional.h"
+#include "util/module.h"
+#include "util/config_file.h"
+#include "iterator/iterator.h"
+#include "iterator/iter_priv.h"
+#include "iterator/iter_scrub.h"
+#include "util/log.h"
+#include "util/netevent.h"
+#include "util/alloc.h"
+#include "sldns/sbuffer.h"
+#include "services/cache/rrset.h"
+
+int LLVMFuzzerTestOneInput(const uint8_t *buf, size_t nr) {
+  log_init("/tmp/foo", 0, NULL);
+  struct regional* reg;
+
+  struct sldns_buffer *pkt = sldns_buffer_new(1);
+  sldns_buffer_new_frm_data(pkt, buf, nr);
+
+  reg = regional_create();
+
+  struct msg_parse msg;
+  struct edns_data edns;
+  memset(&msg, 0, sizeof(struct msg_parse));
+  memset(&edns, 0, sizeof(edns));
+
+  struct query_info qinfo_out;
+  memset(&qinfo_out, 0, sizeof(struct query_info));
+  qinfo_out.qname = (unsigned char *) "\03nic\02de";
+  uint8_t *peter = (unsigned char *) "\02de";   // zonename  
+  struct module_env env;
+  memset(&env, 0, sizeof(struct module_env));
+  struct config_file cfg;
+  memset(&cfg, 0, sizeof(struct config_file));
+
+  cfg.harden_glue = 0;    // glue hardening disabled here; fuzz_1.c exercises the harden_glue=1 path
+  env.cfg = &cfg;
+  cfg.rrset_cache_slabs = HASH_DEFAULT_SLABS;
+  cfg.rrset_cache_size = HASH_DEFAULT_MAXMEM;
+
+  struct comm_base* base = comm_base_create(0);
+  comm_base_timept(base, &env.now, &env.now_tv);
+
+  env.alloc = malloc(sizeof(struct alloc_cache));
+  alloc_init(env.alloc, NULL, 0);
+
+  env.rrset_cache = rrset_cache_create(env.cfg, env.alloc);
+  
+
+  struct iter_env ie;
+  memset(&ie, 0, sizeof(struct iter_env));
+
+  struct iter_priv priv;
+  memset(&priv, 0, sizeof(struct iter_priv));
+  ie.priv = &priv;
+
+
+  if (parse_packet(pkt, &msg, reg) != LDNS_RCODE_NOERROR) {    
+    goto out;
+  }
+  if (parse_extract_edns(&msg, &edns, reg) != LDNS_RCODE_NOERROR) {
+    goto out;
+  }
+
+
+  scrub_message(pkt, &msg, &qinfo_out, peter, reg, &env, &ie);   
+
+out:
+  rrset_cache_delete(env.rrset_cache);
+  alloc_clear(env.alloc);
+  free(env.alloc);
+  comm_base_delete(base);
+  regional_destroy(reg);
+  sldns_buffer_free(pkt);
+  return 0;
+}
diff --git a/projects/vorbis/Dockerfile b/projects/vorbis/Dockerfile
index 2294813..464555d 100644
--- a/projects/vorbis/Dockerfile
+++ b/projects/vorbis/Dockerfile
@@ -20,6 +20,7 @@
 RUN git clone https://git.xiph.org/ogg.git
 RUN git clone https://git.xiph.org/vorbis.git
 RUN svn export https://github.com/mozillasecurity/fuzzdata.git/trunk/samples/ogg decode_corpus
+RUN svn export --force https://github.com/mozillasecurity/fuzzdata.git/trunk/samples/vorbis decode_corpus
 RUN wget --cut-dirs 3 --recursive --level=1 -A ".ogg" https://people.xiph.org/~xiphmont/test-vectors/vorbis/
 WORKDIR vorbis
 COPY build.sh $SRC/
diff --git a/projects/wabt/Dockerfile b/projects/wabt/Dockerfile
index 97a96d3..86caa31 100644
--- a/projects/wabt/Dockerfile
+++ b/projects/wabt/Dockerfile
@@ -15,7 +15,7 @@
 ################################################################################
 
 FROM gcr.io/oss-fuzz-base/base-builder
-MAINTAINER wasm-waterfall@grotations.appspotmail.com
+MAINTAINER binji@chromium.org
 RUN apt-get update && apt-get install -y cmake libtool make python
 RUN git clone --recursive https://github.com/WebAssembly/wabt
 WORKDIR wabt
diff --git a/projects/wabt/project.yaml b/projects/wabt/project.yaml
index f99caa8..933f9f2 100644
--- a/projects/wabt/project.yaml
+++ b/projects/wabt/project.yaml
@@ -1,5 +1,5 @@
 homepage: "https://github.com/WebAssembly/wabt"
-primary_contact: "dschuff@chromium.org"
+primary_contact: "binji@chromium.org"
 sanitizers:
  - address
  - memory
diff --git a/projects/wasmtime/Dockerfile b/projects/wasmtime/Dockerfile
new file mode 100644
index 0000000..a70765b
--- /dev/null
+++ b/projects/wasmtime/Dockerfile
@@ -0,0 +1,31 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+FROM gcr.io/oss-fuzz-base/base-builder
+MAINTAINER foote@fastly.com
+RUN apt-get update && apt-get install -y make autoconf automake libtool curl cmake python llvm-dev libclang-dev clang
+
+ENV CARGO_HOME=/rust RUSTUP_HOME=/rust/rustup PATH=$PATH:/rust/bin
+RUN curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=nightly
+RUN cargo install cargo-fuzz
+
+RUN git clone --depth 1 https://github.com/bytecodealliance/wasmtime wasmtime
+WORKDIR wasmtime
+RUN git submodule update --init --recursive
+
+RUN git clone --depth 1 https://github.com/bytecodealliance/wasmtime-libfuzzer-corpus wasmtime-libfuzzer-corpus
+
+COPY build.sh $SRC/
diff --git a/projects/wasmtime/build.sh b/projects/wasmtime/build.sh
new file mode 100755
index 0000000..82fbaac
--- /dev/null
+++ b/projects/wasmtime/build.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -eu
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Note: This project creates Rust fuzz targets exclusively
+
+export CUSTOM_LIBFUZZER_PATH="$LIB_FUZZING_ENGINE_DEPRECATED"
+export CUSTOM_LIBFUZZER_STD_CXX=c++
+PROJECT_DIR=$SRC/wasmtime
+
+# Because Rust does not support sanitizers via CFLAGS/CXXFLAGS, the environment
+# variables are overridden with values from base-images/base-clang only
+
+export CFLAGS="-O1 -fno-omit-frame-pointer -gline-tables-only -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION"
+export CXXFLAGS_EXTRA="-stdlib=libc++"
+export CXXFLAGS="$CFLAGS $CXXFLAGS_EXTRA"
+export RUSTFLAGS="-Cdebuginfo=1 -Cforce-frame-pointers"
+
+cd $PROJECT_DIR/fuzz && cargo fuzz build -O --debug-assertions
+
+FUZZ_TARGET_OUTPUT_DIR=$PROJECT_DIR/target/x86_64-unknown-linux-gnu/release
+for f in $SRC/wasmtime/fuzz/fuzz_targets/*.rs
+do
+    FUZZ_TARGET_NAME=$(basename ${f%.*})
+    cp $FUZZ_TARGET_OUTPUT_DIR/$FUZZ_TARGET_NAME $OUT/
+    zip -jr $OUT/${FUZZ_TARGET_NAME}_seed_corpus.zip $PROJECT_DIR/wasmtime-libfuzzer-corpus/$FUZZ_TARGET_NAME/
+done
diff --git a/projects/wasmtime/project.yaml b/projects/wasmtime/project.yaml
new file mode 100644
index 0000000..508523f
--- /dev/null
+++ b/projects/wasmtime/project.yaml
@@ -0,0 +1,11 @@
+homepage: "https://wasmtime.dev/"
+primary_contact: "jonathan.foote@gmail.com"
+auto_ccs:
+  - "security@bytecodealliance.org"
+  - "fitzgen@gmail.com"
+  - "alex@alexcrichton.com"
+sanitizers:
+  - address
+fuzzing_engines:
+  - libfuzzer
+language: rust
diff --git a/projects/wavpack/project.yaml b/projects/wavpack/project.yaml
index ecc8140..5e334e8 100644
--- a/projects/wavpack/project.yaml
+++ b/projects/wavpack/project.yaml
@@ -5,3 +5,5 @@
 - thuanpv.nus@gmail.com
 sanitizers: 
 - address 
+- memory
+- undefined
diff --git a/projects/wget/build.sh b/projects/wget/build.sh
index 3d9c277..840977d 100755
--- a/projects/wget/build.sh
+++ b/projects/wget/build.sh
@@ -67,7 +67,8 @@
 CFLAGS="$GNUTLS_CFLAGS" \
 ./configure --with-nettle-mini --enable-gcc-warnings --enable-static --disable-shared --with-included-libtasn1 \
     --with-included-unistring --without-p11-kit --disable-doc --disable-tests --disable-tools --disable-cxx \
-    --disable-maintainer-mode --disable-libdane --disable-gcc-warnings --prefix=$WGET_DEPS_PATH $GNUTLS_CONFIGURE_FLAGS
+    --disable-maintainer-mode --disable-libdane --disable-gcc-warnings --disable-full-test-suite \
+    --prefix=$WGET_DEPS_PATH $GNUTLS_CONFIGURE_FLAGS
 make -j$(nproc)
 make install
 
diff --git a/projects/wget2/build.sh b/projects/wget2/build.sh
index 4646bcb..3ad4e04 100755
--- a/projects/wget2/build.sh
+++ b/projects/wget2/build.sh
@@ -67,7 +67,8 @@
 CFLAGS="$GNUTLS_CFLAGS" \
 ./configure --with-nettle-mini --enable-gcc-warnings --enable-static --disable-shared --with-included-libtasn1 \
     --with-included-unistring --without-p11-kit --disable-doc --disable-tests --disable-tools --disable-cxx \
-    --disable-maintainer-mode --disable-libdane --disable-gcc-warnings --prefix=$WGET2_DEPS_PATH $GNUTLS_CONFIGURE_FLAGS
+    --disable-maintainer-mode --disable-libdane --disable-gcc-warnings --disable-full-test-suite \
+    --prefix=$WGET2_DEPS_PATH $GNUTLS_CONFIGURE_FLAGS
 make -j$(nproc)
 make install
 
diff --git a/projects/wolfssl/project.yaml b/projects/wolfssl/project.yaml
index f0ac195..cc75e08 100644
--- a/projects/wolfssl/project.yaml
+++ b/projects/wolfssl/project.yaml
@@ -1,12 +1,18 @@
 homepage: "https://www.wolfssl.com/"
 primary_contact: "jacob@wolfssl.com"
 auto_ccs:
- - "david@wolfssl.com"
- - "kaleb@wolfssl.com"
- - "levi@wolfssl.com"
- - "testing@wolfssl.com"
+  - "david@wolfssl.com"
+  - "kaleb@wolfssl.com"
+  - "levi@wolfssl.com"
+  - "testing@wolfssl.com"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
- - address
- - memory:
-    experimental: True
- - undefined
+  - address
+  - memory:
+     experimental: True
+  - undefined
+  - dataflow
diff --git a/projects/wuffs/project.yaml b/projects/wuffs/project.yaml
index 92516bf..5b09d29 100644
--- a/projects/wuffs/project.yaml
+++ b/projects/wuffs/project.yaml
@@ -1,5 +1,14 @@
 homepage: "https://github.com/google/wuffs"
 primary_contact: "nigeltao@golang.org"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
+sanitizers:
+  - address
+  - undefined
+  - dataflow
 architectures:
   - x86_64
   - i386
diff --git a/projects/xerces-c/xmlProtoConverter.cpp b/projects/xerces-c/xmlProtoConverter.cpp
index b2caf67..f8a47de 100644
--- a/projects/xerces-c/xmlProtoConverter.cpp
+++ b/projects/xerces-c/xmlProtoConverter.cpp
@@ -56,6 +56,9 @@
 
 void ProtoConverter::visit(KeyValue const& _x)
 {
+	if (!KeyValue::XmlNamespace_IsValid(_x.type()))
+		return;
+
 	switch (_x.type())
 	{
 	case KeyValue::ATTRIBUTES:
@@ -127,6 +130,9 @@
 
 void ProtoConverter::visit(ElementDecl const& _x)
 {
+	if (!ElementDecl::ContentSpec_IsValid(_x.spec()))
+		return;
+
 	m_output << "<!ELEMENT " << _x.name() << " ";
 	switch (_x.spec())
 	{
@@ -167,6 +173,9 @@
 
 void ProtoConverter::visit(AttValue const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	m_output << "\"";
 	string prefix;
 	switch (_x.type())
@@ -196,6 +205,9 @@
 
 void ProtoConverter::visit(DefaultDecl const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	switch (_x.type())
 	{
 	case DefaultDecl::REQUIRED:
@@ -219,6 +231,9 @@
 
 void ProtoConverter::visit(AttDef const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	m_output << " " << removeNonAscii(_x.name()) << " ";
 	switch (_x.type())
 	{
@@ -323,6 +338,9 @@
 
 void ProtoConverter::visit(EntityValue const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	m_output << "\"";
 	string prefix;
 	switch (_x.type())
@@ -353,6 +371,9 @@
 
 void ProtoConverter::visit(EntityDecl const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	m_output << "<!ENTITY ";
 	switch (_x.type())
 	{
@@ -373,6 +394,9 @@
 
 void ProtoConverter::visit(ConditionalSect const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	switch (_x.type())
 	{
 	case ConditionalSect::INCLUDE:
@@ -486,6 +510,9 @@
 /// Returns uri string for a given Element_Id type
 string ProtoConverter::getUri(Element_Id _x)
 {
+	if (!Element::Id_IsValid(_x))
+		return s_XInclude;
+
 	switch (_x)
 	{
 	case Element::XIINCLUDE:
@@ -504,6 +531,9 @@
 
 void ProtoConverter::visit(Element const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	// Predefined child node
 	string child = {};
 	// Predefined uri for child node
@@ -550,6 +580,9 @@
 
 void ProtoConverter::visit(ExternalId const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	switch (_x.type())
 	{
 	case ExternalId::SYSTEM:
@@ -581,6 +614,9 @@
 
 void ProtoConverter::visit(VersionNum const& _x)
 {
+	if (!isValid(_x))
+		return;
+
 	switch (_x.type())
 	{
 	case VersionNum::STANDARD:
@@ -596,6 +632,9 @@
 
 void ProtoConverter::visit(Encodings const& _x)
 {
+	if (!Encodings::Enc_IsValid(_x.name()))
+		return;
+
 	m_output << " encoding=\"";
 	switch (_x.name())
 	{
@@ -699,6 +738,7 @@
 		break;
 	case XmlDeclaration_Standalone_XmlDeclaration_Standalone_INT_MIN_SENTINEL_DO_NOT_USE_:
 	case XmlDeclaration_Standalone_XmlDeclaration_Standalone_INT_MAX_SENTINEL_DO_NOT_USE_:
+	default:
 		break;
 	}
 	m_output << "?>\n";
@@ -715,4 +755,4 @@
 {
 	visit(_x);
 	return m_output.str();
-}
\ No newline at end of file
+}
diff --git a/projects/xerces-c/xmlProtoConverter.h b/projects/xerces-c/xmlProtoConverter.h
index a6333f1..501dde3 100644
--- a/projects/xerces-c/xmlProtoConverter.h
+++ b/projects/xerces-c/xmlProtoConverter.h
@@ -89,6 +89,11 @@
 
 	void visit(XmlDocument const&);
 
+	template <typename T>
+	bool isValid(T const& messageType) {
+		return T::Type_IsValid(messageType.type());
+	}
+
 	std::string removeNonAscii(std::string const&);
 	std::string getUri(Element_Id _x);
 	std::string getPredefined(Element_Id _x, std::string const&);
diff --git a/projects/zlib-ng/project.yaml b/projects/zlib-ng/project.yaml
index b2aa5cd..26cdaa6 100644
--- a/projects/zlib-ng/project.yaml
+++ b/projects/zlib-ng/project.yaml
@@ -18,10 +18,16 @@
 primary_contact: "zlib-ng@circlestorm.org"
 auto_ccs:
   - "sebpop@gmail.com"
+fuzzing_engines:
+  - libfuzzer
+  - afl
+  - honggfuzz
+  - dataflow
 sanitizers:
   - address
   - memory
   - undefined
+  - dataflow
 architectures:
   - x86_64
   - i386