Merge "Change default arch for banchan to arm64"
diff --git a/core/rbe.mk b/core/rbe.mk
index 370d4bd..90328d3 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -87,11 +87,11 @@
   endif
 
   ifdef RBE_R8
-    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
   endif
 
   ifdef RBE_D8
-    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
   endif
 
   rbe_dir :=
diff --git a/envsetup.sh b/envsetup.sh
index 71f0170..cbd59cc 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -403,7 +403,9 @@
     # e.g.
     # ENVSETUP_NO_COMPLETION=adb # -> disable adb completion
     # ENVSETUP_NO_COMPLETION=adb:bit # -> disable adb and bit completion
+    local T=$(gettop)
     for f in ${completion_files[*]}; do
+        f="$T/$f"
         if [ ! -f "$f" ]; then
           echo "Warning: completion file $f not found"
         elif should_add_completion "$f"; then
diff --git a/orchestrator/README b/orchestrator/README
index ce6f5c3..9a1e302 100644
--- a/orchestrator/README
+++ b/orchestrator/README
@@ -2,6 +2,7 @@
 
 from the root of the workspace
 
-ln -fs ../build/build/orchestrator/inner_build/inner_build_demo.py master/.inner_build
-ln -fs ../build/build/orchestrator/inner_build/inner_build_demo.py sc-mainline-prod/.inner_build
+multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
+
+rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
 
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
index bd1c440..d7abef7 100644
--- a/orchestrator/core/api_assembly.py
+++ b/orchestrator/core/api_assembly.py
@@ -34,7 +34,7 @@
     contributions = []
     for tree_key, filenames in contribution_files_dict.items():
         for filename in filenames:
-            json_data = load_contribution_file(filename)
+            json_data = load_contribution_file(context, filename)
             if not json_data:
                 continue
             # TODO: Validate the configs, especially that the domains match what we asked for
@@ -76,13 +76,14 @@
     return result
 
 
-def load_contribution_file(filename):
+def load_contribution_file(context, filename):
     "Load and return the API contribution at filename. On error report error and return None."
     with open(filename) as f:
         try:
             return json.load(f)
         except json.decoder.JSONDecodeError as ex:
             # TODO: Error reporting
+            context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
             raise ex
 
 
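For context on the values handed to context.errors.error() above: json.decoder.JSONDecodeError exposes msg, lineno and colno attributes. A small standalone demonstration (the malformed JSON string here is made up for illustration):

import json

try:
    json.loads('{"name": "api_a", }')  # trailing comma -> parse error
except json.decoder.JSONDecodeError as ex:
    # These are the attributes forwarded to context.errors.error() above.
    print(ex.msg, ex.lineno, ex.colno)
    # -> Expecting property name enclosed in double quotes 1 19
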
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
index 15bc98a..ca9b2a4 100644
--- a/orchestrator/core/api_assembly_cc.py
+++ b/orchestrator/core/api_assembly_cc.py
@@ -17,17 +17,10 @@
 import os
 
 def assemble_cc_api_library(context, ninja, build_file, stub_library):
-    print("\nassembling cc_api_library %s-%s %s from:" % (stub_library.api_surface,
-        stub_library.api_surface_version, stub_library.name))
-    for contrib in stub_library.contributions:
-        print("  %s %s" % (contrib.api_domain, contrib.library_contribution))
-
     staging_dir = context.out.api_library_dir(stub_library.api_surface,
             stub_library.api_surface_version, stub_library.name)
     work_dir = context.out.api_library_work_dir(stub_library.api_surface,
             stub_library.api_surface_version, stub_library.name)
-    print("staging_dir=%s" % (staging_dir))
-    print("work_dir=%s" % (work_dir))
 
     # Generate rules to copy headers
     includes = []
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
index 693a716..03fe890 100644
--- a/orchestrator/core/final_packaging.py
+++ b/orchestrator/core/final_packaging.py
@@ -13,10 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import json
+import os
+import sys
+
 import ninja_tools
 import ninja_syntax # Has to be after ninja_tools because of the path hack
 
-def final_packaging(context):
+def final_packaging(context, inner_trees):
     """Pull together all of the previously defined rules into the final build stems."""
 
     with open(context.out.outer_ninja_file(), "w") as ninja_file:
@@ -25,5 +29,89 @@
         # Add the api surfaces file
         ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
 
+        # For each inner tree
+        for tree in inner_trees.keys():
+            # TODO: Verify that inner_tree.ninja was generated
+
+            # Read and verify file
+            build_targets = read_build_targets_json(context, tree)
+            if not build_targets:
+                continue
+
+            # Generate the ninja and build files for this inner tree
+            generate_cross_domain_build_rules(context, ninja, tree, build_targets)
+
         # Finish writing the ninja file
         ninja.write()
+
+
+def read_build_targets_json(context, tree):
+    """Read and validate the build_targets.json file for the given tree."""
+    try:
+        f = open(tree.out.build_targets_file())
+    except FileNotFoundError:
+        # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
+        return None
+
+    data = None
+    with f:
+        try:
+            data = json.load(f)
+        except json.decoder.JSONDecodeError as ex:
+            sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
+            # TODO: Error reporting
+            raise ex
+
+    # TODO: Better error handling
+    # TODO: Validate json schema
+    return data
+
+
+def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
+    "Generate the ninja and build files for the inner tree."
+    # Include the inner tree's inner_tree.ninja
+    ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
+
+    # Generate module rules and files
+    for module in build_targets.get("modules", []):
+        generate_shared_module(context, ninja, tree, module)
+
+    # Generate staging rules
+    staging_dir = context.out.staging_dir()
+    for staged in build_targets.get("staging", []):
+        # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
+        dest = staged["dest"]
+        dest = os.path.join(staging_dir, dest)
+        if "src" in staged and "obj" in staged:
+            context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
+                    ) # TODO: Filename and line if possible
+        if "src" in staged:
+            ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
+        elif "obj" in staged:
+            ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
+        ninja.add_global_phony("staging", [dest])
+
+    # Generate dist rules
+    dist_dir = context.out.dist_dir()
+    for disted in build_targets.get("dist", []):
+        # TODO: Enforce that dest isn't absolute
+        dest = disted["dest"]
+        dest = os.path.join(dist_dir, dest)
+        ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
+        ninja.add_global_phony("dist", [dest])
+
+
+def generate_shared_module(context, ninja, tree, module):
+    """Generate ninja rules for a module defined in build_targets.json."""
+    module_name = module["name"]
+    module_type = module["type"]
+    share_dir = context.out.module_share_dir(module_type, module_name)
+    src_file = os.path.join(tree.root, module["file"])
+
+    if module_type == "apex":
+        ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
+        # TODO: Generate build file
+
+    else:
+        # TODO: Better error handling
+        raise Exception("Invalid module type: %s" % module_type)
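For reference, a build_targets.json in the shape the functions above consume might look like the sketch below. This is hypothetical, derived only from the keys read above ("modules", "staging", "dist"); every name and path is invented.

import json

# Hypothetical example of the schema consumed by read_build_targets_json()
# and generate_cross_domain_build_rules():
#   - "modules" entries carry name/type/file (only type "apex" is handled);
#     "file" is joined to the inner tree root.
#   - "staging" entries carry "dest" plus exactly one of "src" (joined to the
#     tree root) or "obj" (joined to the tree's out dir).
#   - "dist" entries carry "dest" and "src" (joined to the tree root).
example = {
    "modules": [
        {"name": "com.example.hello", "type": "apex",
         "file": "prebuilts/com.example.hello.apex"}
    ],
    "staging": [
        {"dest": "system/lib/libhello1.so", "obj": "libhello1/libhello1.so"}
    ],
    "dist": [
        {"dest": "hello-notes.txt", "src": "docs/hello-notes.txt"}
    ]
}
print(json.dumps(example, indent=4))
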
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
index 4383dd8..d348ee7 100644
--- a/orchestrator/core/inner_tree.py
+++ b/orchestrator/core/inner_tree.py
@@ -36,23 +36,38 @@
     def __hash__(self):
         return hash((self.root, self.product))
 
+    def _cmp(self, other):
+        if self.root < other.root:
+            return -1
+        if self.root > other.root:
+            return 1
+        if self.product == other.product:
+            return 0
+        if self.product is None:
+            return -1
+        if other.product is None:
+            return 1
+        if self.product < other.product:
+            return -1
+        return 1
+
     def __eq__(self, other):
-        return (self.root == other.root and self.product == other.product)
+        return self._cmp(other) == 0
 
     def __ne__(self, other):
-        return not self.__eq__(other)
+        return self._cmp(other) != 0
 
     def __lt__(self, other):
-        return (self.root, self.product) < (other.root, other.product)
+        return self._cmp(other) < 0
 
     def __le__(self, other):
-        return (self.root, self.product) <= (other.root, other.product)
+        return self._cmp(other) <= 0
 
     def __gt__(self, other):
-        return (self.root, self.product) > (other.root, other.product)
+        return self._cmp(other) > 0
 
     def __ge__(self, other):
-        return (self.root, self.product) >= (other.root, other.product)
+        return self._cmp(other) >= 0
 
 
 class InnerTree(object):
@@ -62,7 +77,12 @@
         self.product = product
         self.domains = {}
         # TODO: Base directory on OUT_DIR
-        self.out = OutDirLayout(context.out.inner_tree_dir(root))
+        out_root = context.out.inner_tree_dir(root)
+        if product:
+            out_root += "_" + product
+        else:
+            out_root += "_unbundled"
+        self.out = OutDirLayout(out_root)
 
     def __str__(self):
         return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
@@ -138,6 +158,11 @@
         """Get an inner tree for tree_key"""
         return self.trees.get(tree_key)
 
+    def keys(self):
+        "Get the inner trees, ordered by tree key."
+        return [self.trees[k] for k in sorted(self.trees.keys())]
+
+
 class OutDirLayout(object):
     """Encapsulates the logic about the layout of the inner tree out directories.
     See also context.OutDir for outer tree out dir contents."""
@@ -155,6 +180,12 @@
     def api_contributions_dir(self):
         return os.path.join(self._root, "api_contributions")
 
+    def build_targets_file(self):
+        return os.path.join(self._root, "build_targets.json")
+
+    def main_ninja_file(self):
+        return os.path.join(self._root, "inner_tree.ninja")
+
 
 def enquote(s):
     return "None" if s is None else "\"%s\"" % s
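The _cmp() helper above exists because product may be None, and Python 3 raises TypeError when ordering None against a str, so the old tuple comparisons could blow up for unbundled trees. A standalone sketch of the intended ordering (TreeKeyish is a made-up stand-in, not the real TreeKey):

class TreeKeyish:
    """Made-up stand-in for TreeKey: order by root, then by product, with a
    None product sorting before any named product (the unbundled case)."""

    def __init__(self, root, product):
        self.root = root
        self.product = product

    def __lt__(self, other):
        if self.root != other.root:
            return self.root < other.root
        if self.product == other.product:
            return False
        if self.product is None:
            return True
        if other.product is None:
            return False
        return self.product < other.product

keys = [TreeKeyish("vendor", "b"), TreeKeyish("vendor", None), TreeKeyish("system", "a")]
print([(k.root, k.product) for k in sorted(keys)])
# -> [('system', 'a'), ('vendor', None), ('vendor', 'b')]
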
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
index 906f1ae..ab81d66 100644
--- a/orchestrator/core/ninja_runner.py
+++ b/orchestrator/core/ninja_runner.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 import subprocess
+import sys
 
 def run_ninja(context, targets):
     """Run ninja.
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
index c676907..16101ea 100644
--- a/orchestrator/core/ninja_tools.py
+++ b/orchestrator/core/ninja_tools.py
@@ -30,6 +30,7 @@
         super(Ninja, self).__init__(file)
         self._context = context
         self._did_copy_file = False
+        self._phonies = {}
 
     def add_copy_file(self, copy_to, copy_from):
         if not self._did_copy_file:
@@ -43,4 +44,16 @@
         build_action.add_variable("out_dir", os.path.dirname(copy_to))
         self.add_build_action(build_action)
 
+    def add_global_phony(self, name, deps):
+        """Add deps to a phony target that multiple places contribute to. If you
+        can, use add_phony instead of this function to save memory."""
+        if type(deps) not in (list, tuple):
+            raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
+        self._phonies.setdefault(name, []).extend(deps)
+
+    def write(self):
+        for phony, deps in self._phonies.items():
+            self.add_phony(phony, deps)
+        super(Ninja, self).write()
+
 
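A rough standalone sketch of the accumulate-then-emit pattern that add_global_phony() and write() implement above (PhonyAccumulator is invented for illustration; it is not the real Ninja class):

import io

class PhonyAccumulator:
    """Miniature of the pattern above: callers extend named phonies as they
    generate rules, and the combined phony build lines are emitted at write()."""

    def __init__(self, stream):
        self._stream = stream
        self._phonies = {}

    def add_global_phony(self, name, deps):
        if type(deps) not in (list, tuple):
            raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
        self._phonies.setdefault(name, []).extend(deps)

    def write(self):
        for phony, deps in self._phonies.items():
            self._stream.write("build %s: phony %s\n" % (phony, " ".join(deps)))

out = io.StringIO()
acc = PhonyAccumulator(out)
acc.add_global_phony("staging", ["out/staging/system/lib/libhello1.so"])
acc.add_global_phony("staging", ["out/staging/system/bin/hello1"])
acc.write()
print(out.getvalue(), end="")
# -> build staging: phony out/staging/system/lib/libhello1.so out/staging/system/bin/hello1
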
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
index bb0885d..508f73a 100755
--- a/orchestrator/core/orchestrator.py
+++ b/orchestrator/core/orchestrator.py
@@ -24,6 +24,7 @@
 import api_export
 import final_packaging
 import inner_tree
+import tree_analysis
 import interrogate
 import lunch
 import ninja_runner
@@ -67,14 +68,10 @@
 
 
 def build():
-    #
-    # Load lunch combo
-    #
-
     # Choose the out directory, set up error handling, etc.
     context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
 
-    # Read the config file
+    # Read the lunch config file
     try:
         config_file, config, variant = lunch.load_current_config()
     except lunch.ConfigException as ex:
@@ -85,44 +82,31 @@
     # Construct the trees and domains dicts
     inner_trees = process_config(context, config)
 
-    #
     # 1. Interrogate the trees
-    #
     inner_trees.for_each_tree(interrogate.interrogate_tree)
     # TODO: Detect bazel-only mode
 
-    #
     # 2a. API Export
-    #
     inner_trees.for_each_tree(api_export.export_apis_from_tree)
 
-    #
     # 2b. API Surface Assembly
-    #
     api_assembly.assemble_apis(context, inner_trees)
 
-    #
-    # 3a. API Domain Analysis
-    #
+    # 3a. Inner tree analysis
+    tree_analysis.analyze_trees(context, inner_trees)
 
-    #
     # 3b. Final Packaging Rules
-    #
-    final_packaging.final_packaging(context)
+    final_packaging.final_packaging(context, inner_trees)
 
-    #
     # 4. Build Execution
-    #
     # TODO: Decide what we want the UX for selecting targets to be across
     # branches... since there are very likely to be conflicting soong short
     # names.
     print("Running ninja...")
-    targets = ["public_api-1-libhwui", "public_api-1-libc"]
+    targets = ["staging", "system"]
     ninja_runner.run_ninja(context, targets)
 
-    #
     # Success!
-    #
     return EXIT_STATUS_OK
 
 def main(argv):
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
new file mode 100644
index 0000000..052cad6
--- /dev/null
+++ b/orchestrator/core/tree_analysis.py
@@ -0,0 +1,24 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def analyze_trees(context, inner_trees):
+    inner_trees.for_each_tree(run_analysis)
+
+def run_analysis(tree_key, inner_tree, cookie):
+    inner_tree.invoke(["analyze"])
+
+
+
+
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
index bb7f8ad..41310e0 100644
--- a/orchestrator/core/utils.py
+++ b/orchestrator/core/utils.py
@@ -38,33 +38,42 @@
 
     def __init__(self, root):
         "Initialize with the root of the OUT_DIR for the outer tree."
-        self._root = root
+        self._out_root = root
         self._intermediates = "intermediates"
 
     def root(self):
-        return self._root
+        return self._out_root
 
     def inner_tree_dir(self, tree_root):
         """Root directory for inner tree inside the out dir."""
-        return os.path.join(self._root, "trees", tree_root)
+        return os.path.join(self._out_root, "trees", tree_root)
 
     def api_ninja_file(self):
         """The ninja file that assembles API surfaces."""
-        return os.path.join(self._root, "api_surfaces.ninja")
+        return os.path.join(self._out_root, "api_surfaces.ninja")
 
     def api_library_dir(self, surface, version, library):
         """Directory for all the contents of a library inside an API surface, including
         the build files.  Any intermediates should go in api_library_work_dir."""
-        return os.path.join(self._root, "api_surfaces", surface, str(version), library)
+        return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
 
     def api_library_work_dir(self, surface, version, library):
         """Intermediates / scratch directory for library inside an API surface."""
-        return os.path.join(self._root, self._intermediates, "api_surfaces", surface, str(version),
-                library)
+        return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
+                str(version), library)
 
     def outer_ninja_file(self):
-        return os.path.join(self._root, "multitree.ninja")
+        return os.path.join(self._out_root, "multitree.ninja")
 
+    def module_share_dir(self, module_type, module_name):
+        return os.path.join(self._out_root, "shared", module_type, module_name)
+
+    def staging_dir(self):
+        return os.path.join(self._out_root, "staging")
+
+    def dist_dir(self):
+        "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
+        return os.path.join(self._out_root, "dist")
 
 class Errors(object):
     """Class for reporting and tracking errors."""
@@ -73,9 +82,21 @@
         self._stream = stream
         self._all = []
 
-    def error(self, message):
+    def error(self, message, file=None, line=None, col=None):
         """Record the error message."""
-        s = str(s)
+        s = ""
+        if file:
+            s += str(file)
+            s += ":"
+        if line:
+            s += str(line)
+            s += ":"
+        if col:
+            s += str(col)
+            s += ":"
+        if s:
+            s += " "
+        s += str(message)
         if s[-1] != "\n":
             s += "\n"
         self._all.append(s)
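With the optional file/line/col arguments, recorded messages get a compiler-style "file:line:col: message" prefix. A standalone mirror of that formatting logic (format_error is a hypothetical helper, not part of this change):

def format_error(message, file=None, line=None, col=None):
    # Mirrors the prefix logic above: each provided location part is appended
    # with a trailing colon, then a space separates the prefix from the message.
    s = ""
    for part in (file, line, col):
        if part:
            s += str(part) + ":"
    if s:
        s += " "
    s += str(message)
    if not s.endswith("\n"):
        s += "\n"
    return s

print(format_error("Expecting ',' delimiter", "api_a-1.json", 12, 5), end="")
# -> api_a-1.json:12:5: Expecting ',' delimiter
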
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
index 6919e04..382844b 100644
--- a/orchestrator/inner_build/common.py
+++ b/orchestrator/inner_build/common.py
@@ -40,6 +40,10 @@
     export_parser = subparsers.add_parser("export_api_contributions",
             help="export the API contributions of this inner tree")
 
+    # create the parser for the "analyze" command
+    analyze_parser = subparsers.add_parser("analyze",
+            help="main build analysis for this inner tree")
+
     # Parse the arguments
     return parser.parse_args(argv)
 
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
index 9aafb4d..264739b 100755
--- a/orchestrator/inner_build/inner_build_demo.py
+++ b/orchestrator/inner_build/inner_build_demo.py
@@ -44,93 +44,60 @@
         mkdirs(contributions_dir)
 
         if "system" in args.api_domain:
-            with open(os.path.join(contributions_dir, "public_api-1.json"), "w") as f:
+            with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
                 # 'name: android' is android.jar
                 f.write(textwrap.dedent("""\
                 {
-                    "name": "public_api",
+                    "name": "api_a",
                     "version": 1,
                     "api_domain": "system",
                     "cc_libraries": [
                         {
-                            "name": "libhwui",
+                            "name": "libhello1",
                             "headers": [
                                 {
-                                    "root": "frameworks/base/libs/hwui/apex/include",
+                                    "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
                                     "files": [
-                                        "android/graphics/jni_runtime.h",
-                                        "android/graphics/paint.h",
-                                        "android/graphics/matrix.h",
-                                        "android/graphics/canvas.h",
-                                        "android/graphics/renderthread.h",
-                                        "android/graphics/bitmap.h",
-                                        "android/graphics/region.h"
+                                        "hello1.h"
                                     ]
                                 }
                             ],
                             "api": [
-                                "frameworks/base/libs/hwui/libhwui.map.txt"
-                            ]
-                        }
-                    ],
-                    "java_libraries": [
-                        {
-                            "name": "android",
-                            "api": [
-                                "frameworks/base/core/api/current.txt"
-                            ]
-                        }
-                    ],
-                    "resource_libraries": [
-                        {
-                            "name": "android",
-                            "api": "frameworks/base/core/res/res/values/public.xml"
-                        }
-                    ],
-                    "host_executables": [
-                        {
-                            "name": "aapt2",
-                            "binary": "out/host/bin/aapt2",
-                            "runfiles": [
-                                "../lib/todo.so"
-                            ]
-                        }
-                    ]
-                }"""))
-        elif "com.android.bionic" in args.api_domain:
-            with open(os.path.join(contributions_dir, "public_api-1.json"), "w") as f:
-                # 'name: android' is android.jar
-                f.write(textwrap.dedent("""\
-                {
-                    "name": "public_api",
-                    "version": 1,
-                    "api_domain": "system",
-                    "cc_libraries": [
-                        {
-                            "name": "libc",
-                            "headers": [
-                                {
-                                    "root": "bionic/libc/include",
-                                    "files": [
-                                        "stdio.h",
-                                        "sys/klog.h"
-                                    ]
-                                }
-                            ],
-                            "api": "bionic/libc/libc.map.txt"
-                        }
-                    ],
-                    "java_libraries": [
-                        {
-                            "name": "android",
-                            "api": [
-                                "frameworks/base/libs/hwui/api/current.txt"
+                                "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
                             ]
                         }
                     ]
                 }"""))
 
-
+    def analyze(self, args):
+        if "system" in args.api_domain:
+            # Nothing to export in this demo
+            # Write a fake inner_tree.ninja; what the inner tree would have generated
+            with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
+                # TODO: Note that this uses paths relative to the workspace, not the inner
+                # tree, for demo purposes until we get the ninja chdir change in.
+                f.write(textwrap.dedent("""\
+                    rule compile_c
+                        command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
+                    rule link_so
+                        command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
+                    build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+                        out_dir = %(OUT_DIR)s/libhello1
+                        cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
+                    build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
+                        out_dir = %(OUT_DIR)s/libhello1
+                    build system: phony %(OUT_DIR)s/libhello1/libhello1.so
+                """ % { "OUT_DIR": args.out_dir }))
+            with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
+                f.write(textwrap.dedent("""\
+                {
+                    "staging": [
+                        {
+                            "dest": "staging/system/lib/libhello1.so",
+                            "obj": "libhello1/libhello1.so"
+                        }
+                    ]
+                }"""))
 
 def main(argv):
     return InnerBuildSoong().Run(argv)
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
new file mode 100644
index 0000000..8200dc0
--- /dev/null
+++ b/orchestrator/test_workspace/combo.mcombo
@@ -0,0 +1,17 @@
+{
+    "lunchable": true,
+    "system": {
+        "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+        "product": "test_product1"
+    },
+    "vendor": {
+        "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+        "product": "test_product2"
+    },
+    "modules": {
+        "module_1": {
+            "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
+        }
+    }
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
new file mode 120000
index 0000000..d8f235f
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/.inner_build
@@ -0,0 +1 @@
+../../inner_build/inner_build_demo.py
\ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
new file mode 100644
index 0000000..1415082
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+#include "hello1.h"
+
+void hello1(void) {
+    printf("hello1");
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
new file mode 100644
index 0000000..0309c1c
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
@@ -0,0 +1,4 @@
+#pragma once
+
+extern "C" void hello1(void);
+
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index fb0370e..c7dcd60 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -22,10 +22,7 @@
 # Ramdisk
 PRODUCT_PACKAGES += \
     init_first_stage \
-    e2fsck.ramdisk \
-    fsck.f2fs.ramdisk \
-    tune2fs.ramdisk \
-    snapuserd.ramdisk \
+    snapuserd_ramdisk \
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 18e08e4..716c8e0 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -38,15 +38,3 @@
 PRODUCT_PACKAGES += \
     snapuserd \
 
-# For dedicated recovery partitions, we need to include snapuserd
-# For GKI devices, BOARD_USES_RECOVERY_AS_BOOT is empty, but
-# so is BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT.
-ifdef BUILDING_RECOVERY_IMAGE
-ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-PRODUCT_PACKAGES += \
-    snapuserd.recovery
-endif
-endif
-endif
-
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index 09ebf30..46ba253 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -56,6 +56,7 @@
 
 from __future__ import print_function
 import csv
+import datetime
 import html
 import sys
 
@@ -258,7 +259,7 @@
 
 
 def dump_stats(writer, warn_patterns):
-  """Dump some stats about total number of warnings and such."""
+  """Dump some stats about total number of warnings and date."""
 
   known = 0
   skipped = 0
@@ -279,6 +280,8 @@
   if total < 1000:
     extra_msg = ' (low count may indicate incremental build)'
   writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+  date_time_str = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
+  writer('<p>(generated on ' + date_time_str + ')')
 
 
 # New base table of warnings, [severity, warn_id, project, warning_message]
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index f24cfb7..aa68313 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -64,6 +64,10 @@
 from . import tidy_warn_patterns as tidy_patterns
 
 
+# Location of this file is used to guess the root of Android source tree.
+THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'
+
+
 def parse_args(use_google3):
   """Define and parse the args. Return the parse_args() result."""
   parser = argparse.ArgumentParser(
@@ -217,20 +221,22 @@
   return link
 
 
-def find_warn_py_and_android_root(path):
-  """Return android source root path if warn.py is found."""
+def find_this_file_and_android_root(path):
+  """Return android source root path if this file is found."""
   parts = path.split('/')
   for idx in reversed(range(2, len(parts))):
     root_path = '/'.join(parts[:idx])
     # Android root directory should contain this script.
-    if os.path.exists(root_path + '/build/make/tools/warn.py'):
+    if os.path.exists(root_path + '/' + THIS_FILE_PATH):
       return root_path
   return ''
 
 
 def find_android_root_top_dirs(root_dir):
   """Return a list of directories under the root_dir, if it exists."""
-  if not os.path.isdir(root_dir):
+  # Root directory should contain at least build/make and build/soong.
+  if (not os.path.isdir(root_dir + '/build/make') or
+      not os.path.isdir(root_dir + '/build/soong')):
     return None
   return list(filter(lambda d: os.path.isdir(root_dir + '/' + d),
                      os.listdir(root_dir)))
@@ -257,7 +263,7 @@
       # the source tree root.
       if count < 100:
         path = os.path.normpath(re.sub(':.*$', '', line))
-        android_root = find_warn_py_and_android_root(path)
+        android_root = find_this_file_and_android_root(path)
         if android_root:
           return android_root, find_android_root_top_dirs(android_root)
   # Do not use common prefix of a small number of paths.
@@ -272,10 +278,11 @@
     return android_root, find_android_root_top_dirs(android_root)
   # When the build.log file is moved to a different machine where
   # android_root is not found, use the location of this script
-  # to find the android source tree root and its sub directories.
-  # This __file__ is /..../build/make/tools/warn/warn_common.py
-  script_root = __file__.replace('/build/make/tools/warn/warn_common.py', '')
-  return android_root, find_android_root_top_dirs(script_root)
+  # to find the android source tree sub directories.
+  if __file__.endswith('/' + THIS_FILE_PATH):
+    script_root = __file__.replace('/' + THIS_FILE_PATH, '')
+    return android_root, find_android_root_top_dirs(script_root)
+  return android_root, None
 
 
 def remove_android_root_prefix(path, android_root):
@@ -367,7 +374,6 @@
   target_product = 'unknown'
   target_variant = 'unknown'
   build_id = 'unknown'
-  use_rbe = False
   android_root, root_top_dirs = find_android_root(infile)
   infile.seek(0)
 
@@ -443,14 +449,13 @@
       continue
     checked_warning_lines[line] = True
 
-    # Clean up extra prefix if RBE is used.
-    if use_rbe:
-      if '/b/f/w/' in line:
-        result = bfw_warning_pattern.search(line)
-      else:
-        result = extra_warning_pattern.search(line)
-      if result is not None:
-        line = result.group(1)
+    # Clean up extra prefix that could be introduced when RBE was used.
+    if '/b/f/w/' in line:
+      result = bfw_warning_pattern.search(line)
+    else:
+      result = extra_warning_pattern.search(line)
+    if result is not None:
+      line = result.group(1)
 
     if warning_pattern.match(line):
       if line.startswith('warning: '):
@@ -479,13 +484,6 @@
       if result is not None:
         build_id = result.group(0)
         continue
-      result = re.search('(?<=^TOP=).*', line)
-      if result is not None:
-        android_root = result.group(1)
-        continue
-      if re.search('USE_RBE=', line) is not None:
-        use_rbe = True
-        continue
 
   if android_root:
     new_unique_warnings = dict()