Merge "Update ide_query script to new format" into main
@@ -58,7 +58,7 @@ cc_library_host_static {
     shared_libs: ["libclang-cpp_host"],
     static_libs: [
         "include_scanner",
-        "ide_query_proto",
+        "cc_analyzer_proto",
     ],
     defaults: ["ide_query_cc_analyzer_defaults"],
 }
@@ -72,7 +72,7 @@ cc_binary_host {
         "libprotobuf-cpp-full",
     ],
     static_libs: [
-        "ide_query_proto",
+        "cc_analyzer_proto",
         "builtin_headers",
         "include_scanner",
         "analyzer",
@@ -20,9 +20,9 @@
 #include <utility>
 #include <vector>
 
+#include "cc_analyzer.pb.h"
 #include "clang/Tooling/CompilationDatabase.h"
 #include "clang/Tooling/JSONCompilationDatabase.h"
-#include "ide_query.pb.h"
 #include "include_scanner.h"
 #include "llvm/ADT/SmallString.h"
 #include "llvm/ADT/StringRef.h"
@@ -48,11 +48,11 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
 }
 } // namespace
 
-::ide_query::DepsResponse GetDeps(::ide_query::RepoState state) {
-  ::ide_query::DepsResponse results;
+::cc_analyzer::DepsResponse GetDeps(::cc_analyzer::RepoState state) {
+  ::cc_analyzer::DepsResponse results;
   auto db = LoadCompDB(state.comp_db_path());
   if (!db) {
-    results.mutable_status()->set_code(::ide_query::Status::FAILURE);
+    results.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
     results.mutable_status()->set_message(llvm::toString(db.takeError()));
     return results;
   }
@@ -63,7 +63,7 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
     llvm::sys::path::append(abs_file, active_file);
     auto cmds = db->get()->getCompileCommands(active_file);
     if (cmds.empty()) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message(
           llvm::Twine("Can't find compile flags for file: ", abs_file).str());
       continue;
@@ -80,11 +80,11 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
   return results;
 }
 
-::ide_query::IdeAnalysis GetBuildInputs(::ide_query::RepoState state) {
+::cc_analyzer::IdeAnalysis GetBuildInputs(::cc_analyzer::RepoState state) {
   auto db = LoadCompDB(state.comp_db_path());
-  ::ide_query::IdeAnalysis results;
+  ::cc_analyzer::IdeAnalysis results;
   if (!db) {
-    results.mutable_status()->set_code(::ide_query::Status::FAILURE);
+    results.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
     results.mutable_status()->set_message(llvm::toString(db.takeError()));
     return results;
   }
@@ -97,7 +97,6 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
     genfile_root_abs.push_back('/');
   }
 
-  results.set_build_artifact_root(state.out_dir());
   for (llvm::StringRef active_file : state.active_file_path()) {
     auto& result = *results.add_sources();
     result.set_path(active_file.str());
@@ -106,7 +105,7 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
     llvm::sys::path::append(abs_file, active_file);
     auto cmds = db->get()->getCompileCommands(abs_file);
     if (cmds.empty()) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message(
           llvm::Twine("Can't find compile flags for file: ", abs_file).str());
       continue;
@@ -114,7 +113,7 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
     const auto& cmd = cmds.front();
     llvm::StringRef working_dir = cmd.Directory;
     if (!working_dir.consume_front(repo_dir)) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message("Command working dir " +
                                            working_dir.str() +
                                            " outside repository " + repo_dir);
@@ -127,7 +126,7 @@ llvm::Expected<std::unique_ptr<clang::tooling::CompilationDatabase>> LoadCompDB(
     auto includes =
         ScanIncludes(cmds.front(), llvm::vfs::createPhysicalFileSystem());
     if (!includes) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message(
           llvm::toString(includes.takeError()));
       continue;
@@ -17,17 +17,17 @@
 #ifndef _TOOLS_IDE_QUERY_CC_ANALYZER_ANALYZER_H_
 #define _TOOLS_IDE_QUERY_CC_ANALYZER_ANALYZER_H_
 
-#include "ide_query.pb.h"
+#include "cc_analyzer.pb.h"
 
 namespace tools::ide_query::cc_analyzer {
 
 // Scans the build graph and returns target names from the build graph to
 // generate all the dependencies for the active files.
-::ide_query::DepsResponse GetDeps(::ide_query::RepoState state);
+::cc_analyzer::DepsResponse GetDeps(::cc_analyzer::RepoState state);
 
 // Scans the sources and returns all the source files required for analyzing the
 // active files.
-::ide_query::IdeAnalysis GetBuildInputs(::ide_query::RepoState state);
+::cc_analyzer::IdeAnalysis GetBuildInputs(::cc_analyzer::RepoState state);
 
 } // namespace tools::ide_query::cc_analyzer
 
@@ -28,7 +28,7 @@
 
 #include "analyzer.h"
 #include "google/protobuf/message.h"
-#include "ide_query.pb.h"
+#include "cc_analyzer.pb.h"
 #include "llvm/ADT/StringRef.h"
 #include "llvm/Support/CommandLine.h"
 #include "llvm/Support/TargetSelect.h"
@@ -48,9 +48,9 @@ llvm::cl::opt<OpMode> mode{
     llvm::cl::desc("Print the list of headers to insert and remove"),
 };
 
-ide_query::IdeAnalysis ReturnError(llvm::StringRef message) {
-  ide_query::IdeAnalysis result;
-  result.mutable_status()->set_code(ide_query::Status::FAILURE);
+cc_analyzer::IdeAnalysis ReturnError(llvm::StringRef message) {
+  cc_analyzer::IdeAnalysis result;
+  result.mutable_status()->set_code(cc_analyzer::Status::FAILURE);
   result.mutable_status()->set_message(message.str());
   return result;
 }
@@ -61,7 +61,7 @@ int main(int argc, char* argv[]) {
   llvm::InitializeAllTargetInfos();
   llvm::cl::ParseCommandLineOptions(argc, argv);
 
-  ide_query::RepoState state;
+  cc_analyzer::RepoState state;
   if (!state.ParseFromFileDescriptor(STDIN_FILENO)) {
     llvm::errs() << "Failed to parse input!\n";
     return 1;
@@ -70,12 +70,12 @@ int main(int argc, char* argv[]) {
   std::unique_ptr<google::protobuf::Message> result;
   switch (mode) {
     case OpMode::DEPS: {
-      result = std::make_unique<ide_query::DepsResponse>(
+      result = std::make_unique<cc_analyzer::DepsResponse>(
           tools::ide_query::cc_analyzer::GetDeps(std::move(state)));
       break;
     }
    case OpMode::INPUTS: {
-      result = std::make_unique<ide_query::IdeAnalysis>(
+      result = std::make_unique<cc_analyzer::IdeAnalysis>(
          tools::ide_query::cc_analyzer::GetBuildInputs(std::move(state)));
      break;
    }
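The hunks above keep the analyzer's wire protocol unchanged while switching it to the new cc_analyzer messages: the binary reads a serialized RepoState from stdin and writes a DepsResponse ("deps" mode) or IdeAnalysis ("inputs" mode) to stdout. The Go sketch below illustrates one way a caller could drive it; it is not part of this change, the example paths are invented, and passing the mode as a plain argument is an assumption (only the mode string "deps" is visible in the driver later in this diff). The binary location follows the PREBUILTS_CLANG_TOOLS_ROOT convention that driver uses.

package main

import (
	"bytes"
	"context"
	"log"
	"os"
	"os/exec"
	"path"

	"google.golang.org/protobuf/proto"

	apb "ide_query/cc_analyzer_proto"
)

func main() {
	state := &apb.RepoState{
		RepoDir:        "/home/user/work/android/",                  // example value
		ActiveFilePath: []string{"frameworks/base/libs/foo/foo.cc"}, // example value
		OutDir:         "out",
		CompDbPath:     "out/soong/development/ide/compdb/compile_commands.json",
	}
	in, err := proto.Marshal(state)
	if err != nil {
		log.Fatalln("Failed to serialize state:", err)
	}

	// Same binary location the Go driver uses; "deps" asks for build targets,
	// "inputs" for per-file compile info. Passing the mode as a plain argument
	// is an assumption for illustration.
	analyzer := path.Join(os.Getenv("PREBUILTS_CLANG_TOOLS_ROOT"), "bin/ide_query_cc_analyzer")
	cmd := exec.CommandContext(context.Background(), analyzer, "deps")
	cmd.Stdin = bytes.NewReader(in)
	out, err := cmd.Output()
	if err != nil {
		log.Fatalln("cc_analyzer failed:", err)
	}

	resp := new(apb.DepsResponse)
	if err := proto.Unmarshal(out, resp); err != nil {
		log.Fatalln("Malformed response from cc_analyzer:", err)
	}
	for _, d := range resp.GetDeps() {
		log.Printf("%s -> %v", d.GetSourceFile(), d.GetBuildTarget())
	}
}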
@@ -19,9 +19,9 @@ package {
 }
 
 cc_library_host_static {
-    name: "ide_query_proto",
+    name: "cc_analyzer_proto",
     srcs: [
-        "ide_query.proto",
+        "cc_analyzer.proto",
     ],
     proto: {
         export_proto_headers: true,
tools/ide_query/cc_analyzer_proto/cc_analyzer.pb.go (new file, 789 lines)
@@ -0,0 +1,789 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// 	protoc-gen-go v1.30.0
+// 	protoc        v3.21.12
+// source: cc_analyzer.proto
+
+package cc_analyzer_proto
+
+import (
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+	reflect "reflect"
+	sync "sync"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type Status_Code int32
+
+const (
+	Status_OK      Status_Code = 0
+	Status_FAILURE Status_Code = 1
+)
+
+// Indicates the success/failure for analysis.
+type Status struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Code Status_Code `protobuf:"varint,1,opt,name=code,proto3,enum=cc_analyzer.Status_Code" json:"code,omitempty"`
+	// Details about the status, might be displayed to user.
+	Message *string `protobuf:"bytes,2,opt,name=message,proto3,oneof" json:"message,omitempty"`
+}
+
+// Represents an Android checkout on user's workstation.
+type RepoState struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Absolute path for the checkout in the workstation.
+	// e.g. /home/user/work/android/
+	RepoDir string `protobuf:"bytes,1,opt,name=repo_dir,json=repoDir,proto3" json:"repo_dir,omitempty"`
+	// Relative to repo_dir.
+	ActiveFilePath []string `protobuf:"bytes,2,rep,name=active_file_path,json=activeFilePath,proto3" json:"active_file_path,omitempty"`
+	// Repository relative path to output directory in workstation.
+	OutDir string `protobuf:"bytes,3,opt,name=out_dir,json=outDir,proto3" json:"out_dir,omitempty"`
+	// Repository relative path to compile_commands.json in workstation.
+	CompDbPath string `protobuf:"bytes,4,opt,name=comp_db_path,json=compDbPath,proto3" json:"comp_db_path,omitempty"`
+}
+
+// Provides all the targets that are pre-requisities for running language
+// services on active_file_paths.
+type DepsResponse struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Deps   []*DepsResponse_Deps `protobuf:"bytes,1,rep,name=deps,proto3" json:"deps,omitempty"`
+	Status *Status              `protobuf:"bytes,2,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+// Returns all the information necessary for providing language services for the
+// active files.
+type GeneratedFile struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Path to the file relative to repository root.
+	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
+	// The text of the generated file, if not provided contents will be read
+	// from the path above in user's workstation.
+	Contents []byte `protobuf:"bytes,2,opt,name=contents,proto3,oneof" json:"contents,omitempty"`
+}
+
+type SourceFile struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Path to the source file relative to repository root.
+	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
+	// Working directory used by the build system. All the relative
+	// paths in compiler_arguments should be relative to this path.
+	// Relative to repository root.
+	WorkingDir string `protobuf:"bytes,2,opt,name=working_dir,json=workingDir,proto3" json:"working_dir,omitempty"`
+	// Compiler arguments to compile the source file. If multiple variants
+	// of the module being compiled are possible, the query script will choose
+	// one.
+	CompilerArguments []string `protobuf:"bytes,3,rep,name=compiler_arguments,json=compilerArguments,proto3" json:"compiler_arguments,omitempty"`
+	// Any generated files that are used in compiling the file.
+	Generated []*GeneratedFile `protobuf:"bytes,4,rep,name=generated,proto3" json:"generated,omitempty"`
+	// Paths to all of the sources, like build files, code generators,
+	// proto files etc. that were used during analysis. Used to figure
+	// out when a set of build artifacts are stale and the query tool
+	// must be re-run.
+	// Relative to repository root.
+	Deps []string `protobuf:"bytes,5,rep,name=deps,proto3" json:"deps,omitempty"`
+	// Represents analysis status for this particular file. e.g. not part
+	// of the build graph.
+	Status *Status `protobuf:"bytes,6,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+type IdeAnalysis struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Sources []*SourceFile `protobuf:"bytes,2,rep,name=sources,proto3" json:"sources,omitempty"`
+	// Status representing overall analysis.
+	// Should fail only when no analysis can be performed.
+	Status *Status `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+// Build dependencies of a source file for providing language services.
+type DepsResponse_Deps struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Relative to repo_dir.
+	SourceFile string `protobuf:"bytes,1,opt,name=source_file,json=sourceFile,proto3" json:"source_file,omitempty"`
+	// Build target to execute for generating dep.
+	BuildTarget []string `protobuf:"bytes,2,rep,name=build_target,json=buildTarget,proto3" json:"build_target,omitempty"`
+	Status      *Status  `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}

[... remainder of the generated file: Enum()/String()/Descriptor() helpers for Status_Code, Reset/String/ProtoReflect/Descriptor methods and field getters for each message above, the raw file descriptor bytes, the go-type and dependency index tables, and the file_cc_analyzer_proto_init() registration code ...]
tools/ide_query/cc_analyzer_proto/cc_analyzer.proto (new file, 109 lines)
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+syntax = "proto3";
+
+package cc_analyzer;
+
+option go_package = "ide_query/cc_analyzer_proto";
+
+// Indicates the success/failure for analysis.
+message Status {
+  enum Code {
+    OK = 0;
+    FAILURE = 1;
+  }
+  Code code = 1;
+  // Details about the status, might be displayed to user.
+  optional string message = 2;
+}
+
+// Represents an Android checkout on user's workstation.
+message RepoState {
+  // Absolute path for the checkout in the workstation.
+  // e.g. /home/user/work/android/
+  string repo_dir = 1;
+  // Relative to repo_dir.
+  repeated string active_file_path = 2;
+  // Repository relative path to output directory in workstation.
+  string out_dir = 3;
+  // Repository relative path to compile_commands.json in workstation.
+  string comp_db_path = 4;
+}
+
+// Provides all the targets that are pre-requisities for running language
+// services on active_file_paths.
+message DepsResponse {
+  // Build dependencies of a source file for providing language services.
+  message Deps {
+    // Relative to repo_dir.
+    string source_file = 1;
+    // Build target to execute for generating dep.
+    repeated string build_target = 2;
+    optional Status status = 3;
+  }
+  repeated Deps deps = 1;
+  optional Status status = 2;
+}
+
+// Returns all the information necessary for providing language services for the
+// active files.
+message GeneratedFile {
+  // Path to the file relative to repository root.
+  string path = 1;
+
+  // The text of the generated file, if not provided contents will be read
+  // from the path above in user's workstation.
+  optional bytes contents = 2;
+}
+
+message SourceFile {
+  // Path to the source file relative to repository root.
+  string path = 1;
+
+  // Working directory used by the build system. All the relative
+  // paths in compiler_arguments should be relative to this path.
+  // Relative to repository root.
+  string working_dir = 2;
+
+  // Compiler arguments to compile the source file. If multiple variants
+  // of the module being compiled are possible, the query script will choose
+  // one.
+  repeated string compiler_arguments = 3;
+
+  // Any generated files that are used in compiling the file.
+  repeated GeneratedFile generated = 4;
+
+  // Paths to all of the sources, like build files, code generators,
+  // proto files etc. that were used during analysis. Used to figure
+  // out when a set of build artifacts are stale and the query tool
+  // must be re-run.
+  // Relative to repository root.
+  repeated string deps = 5;
+
+  // Represents analysis status for this particular file. e.g. not part
+  // of the build graph.
+  optional Status status = 6;
+}
+
+message IdeAnalysis {
+  repeated SourceFile sources = 2;
+
+  // Status representing overall analysis.
+  // Should fail only when no analysis can be performed.
+  optional Status status = 3;
+
+  reserved 1;
+}
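A minimal sketch, assuming invented file paths and compiler flags, of how the messages above look when populated and read through the generated cc_analyzer_proto Go package; the getters follow the protoc-gen-go output included earlier in this change. This is orientation material, not part of the commit.

package main

import (
	"fmt"

	apb "ide_query/cc_analyzer_proto"
)

func main() {
	// Request side: what the driver sends to the analyzer.
	state := &apb.RepoState{
		RepoDir:        "/home/user/work/android/", // example value
		ActiveFilePath: []string{"frameworks/base/libs/foo/foo.cc"},
		OutDir:         "out",
		CompDbPath:     "out/soong/development/ide/compdb/compile_commands.json",
	}
	fmt.Println(len(state.GetActiveFilePath()), "active file(s)")

	// Response side: the "inputs" mode answers with an IdeAnalysis message.
	resp := &apb.IdeAnalysis{
		Sources: []*apb.SourceFile{{
			Path:              "frameworks/base/libs/foo/foo.cc",
			WorkingDir:        "out/soong/workspace",               // example value
			CompilerArguments: []string{"clang++", "-std=gnu++20"}, // example values
		}},
	}
	for _, s := range resp.GetSources() {
		if st := s.GetStatus(); st != nil && st.GetCode() == apb.Status_FAILURE {
			fmt.Println("analysis failed for", s.GetPath()+":", st.GetMessage())
			continue
		}
		fmt.Println(s.GetPath(), "deps:", len(s.GetDeps()), "generated:", len(s.GetGenerated()))
	}
}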
tools/ide_query/cc_analyzer_proto/regen.sh (new executable file, 3 lines)
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+aprotoc --go_out=paths=source_relative:. cc_analyzer.proto
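The go_package option maps the regenerated bindings to the ide_query/cc_analyzer_proto import path, which the driver below imports as apb next to the existing pb (ide_query_proto) package. As a small, hypothetical illustration of how the analyzer's Status meets the new result format, the helper below converts a failed Status into the AnalysisError used by the updated IdeAnalysis; statusToError is not part of this change, and AnalysisError's shape is assumed from its use in the hunks that follow.

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"

	apb "ide_query/cc_analyzer_proto"
	pb "ide_query/ide_query_proto"
)

// statusToError is a hypothetical helper: it maps a failed cc_analyzer.Status
// onto the AnalysisError carried by the new ide_query result format.
func statusToError(s *apb.Status) *pb.AnalysisError {
	if s == nil || s.GetCode() == apb.Status_OK {
		return nil
	}
	return &pb.AnalysisError{ErrorMessage: s.GetMessage()}
}

func main() {
	st := &apb.Status{
		Code:    apb.Status_FAILURE,
		Message: proto.String("compile_commands.json not found"), // example message
	}
	if e := statusToError(st); e != nil {
		fmt.Println(e.GetErrorMessage())
	}
}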
@@ -33,6 +33,7 @@ import (
 	"strings"
 
 	"google.golang.org/protobuf/proto"
+	apb "ide_query/cc_analyzer_proto"
 	pb "ide_query/ide_query_proto"
 )
 
@@ -42,9 +43,6 @@ type Env struct {
 	RepoDir        string
 	OutDir         string
 	ClangToolsRoot string
-
-	CcFiles   []string
-	JavaFiles []string
 }
 
 // LunchTarget is a parsed Android lunch target.
@@ -83,7 +81,7 @@ func (l *LunchTarget) String() string {
 
 func main() {
 	var env Env
-	env.OutDir = os.Getenv("OUT_DIR")
+	env.OutDir = strings.TrimSuffix(os.Getenv("OUT_DIR"), "/")
 	env.RepoDir = os.Getenv("ANDROID_BUILD_TOP")
 	env.ClangToolsRoot = os.Getenv("PREBUILTS_CLANG_TOOLS_ROOT")
 	flag.Var(&env.LunchTarget, "lunch_target", "The lunch target to query")
@@ -95,12 +93,13 @@ func main() {
 		return
 	}
 
+	var ccFiles, javaFiles []string
 	for _, f := range files {
 		switch {
 		case strings.HasSuffix(f, ".java") || strings.HasSuffix(f, ".kt"):
-			env.JavaFiles = append(env.JavaFiles, f)
+			javaFiles = append(javaFiles, f)
 		case strings.HasSuffix(f, ".cc") || strings.HasSuffix(f, ".cpp") || strings.HasSuffix(f, ".h"):
-			env.CcFiles = append(env.CcFiles, f)
+			ccFiles = append(ccFiles, f)
 		default:
 			log.Printf("File %q is supported - will be skipped.", f)
 		}
@@ -110,28 +109,54 @@ func main() {
 	// TODO(michaelmerg): Figure out if module_bp_java_deps.json and compile_commands.json is outdated.
 	runMake(ctx, env, "nothing")
 
-	javaModules, javaFileToModuleMap, err := loadJavaModules(&env)
+	javaModules, err := loadJavaModules(env)
 	if err != nil {
 		log.Printf("Failed to load java modules: %v", err)
 	}
-	toMake := getJavaTargets(javaFileToModuleMap)
 
-	ccTargets, status := getCCTargets(ctx, &env)
-	if status != nil && status.Code != pb.Status_OK {
-		log.Fatalf("Failed to query cc targets: %v", *status.Message)
-	}
-	toMake = append(toMake, ccTargets...)
-	fmt.Fprintf(os.Stderr, "Running make for modules: %v\n", strings.Join(toMake, ", "))
-	if err := runMake(ctx, env, toMake...); err != nil {
-		log.Printf("Building deps failed: %v", err)
+	var targets []string
+	javaTargetsByFile := findJavaModules(javaFiles, javaModules)
+	for _, t := range javaTargetsByFile {
+		targets = append(targets, t)
 	}
 
-	res := getJavaInputs(&env, javaModules, javaFileToModuleMap)
-	ccAnalysis := getCCInputs(ctx, &env)
-	proto.Merge(res, ccAnalysis)
+	ccTargets, err := getCCTargets(ctx, env, ccFiles)
+	if err != nil {
+		log.Fatalf("Failed to query cc targets: %v", err)
+	}
+	targets = append(targets, ccTargets...)
+	if len(targets) == 0 {
+		fmt.Println("No targets found.")
+		os.Exit(1)
+		return
+	}
 
-	res.BuildArtifactRoot = env.OutDir
-	data, err := proto.Marshal(res)
+	fmt.Fprintf(os.Stderr, "Running make for modules: %v\n", strings.Join(targets, ", "))
+	if err := runMake(ctx, env, targets...); err != nil {
+		log.Printf("Building modules failed: %v", err)
+	}
+
+	var analysis pb.IdeAnalysis
+	results, units := getJavaInputs(env, javaTargetsByFile, javaModules)
+	analysis.Results = results
+	analysis.Units = units
+	if err != nil && analysis.Error == nil {
+		analysis.Error = &pb.AnalysisError{
+			ErrorMessage: err.Error(),
+		}
+	}
+
+	results, units, err = getCCInputs(ctx, env, ccFiles)
+	analysis.Results = append(analysis.Results, results...)
+	analysis.Units = append(analysis.Units, units...)
+	if err != nil && analysis.Error == nil {
+		analysis.Error = &pb.AnalysisError{
+			ErrorMessage: err.Error(),
+		}
+	}
+
+	analysis.BuildOutDir = env.OutDir
+	data, err := proto.Marshal(&analysis)
 	if err != nil {
 		log.Fatalf("Failed to marshal result proto: %v", err)
 	}
@@ -141,22 +166,22 @@ func main() {
 		log.Fatalf("Failed to write result proto: %v", err)
 	}
 
-	for _, s := range res.Sources {
-		fmt.Fprintf(os.Stderr, "%s: %v (Deps: %d, Generated: %d)\n", s.GetPath(), s.GetStatus(), len(s.GetDeps()), len(s.GetGenerated()))
+	for _, r := range analysis.Results {
+		fmt.Fprintf(os.Stderr, "%s: %+v\n", r.GetSourceFilePath(), r.GetStatus())
 	}
 }
 
-func repoState(env *Env) *pb.RepoState {
+func repoState(env Env, filePaths []string) *apb.RepoState {
 	const compDbPath = "soong/development/ide/compdb/compile_commands.json"
-	return &pb.RepoState{
+	return &apb.RepoState{
 		RepoDir:        env.RepoDir,
-		ActiveFilePath: env.CcFiles,
+		ActiveFilePath: filePaths,
 		OutDir:         env.OutDir,
 		CompDbPath:     path.Join(env.OutDir, compDbPath),
 	}
 }
 
-func runCCanalyzer(ctx context.Context, env *Env, mode string, in []byte) ([]byte, error) {
+func runCCanalyzer(ctx context.Context, env Env, mode string, in []byte) ([]byte, error) {
 	ccAnalyzerPath := path.Join(env.ClangToolsRoot, "bin/ide_query_cc_analyzer")
 	outBuffer := new(bytes.Buffer)
 
@@ -176,127 +201,205 @@ func runCCanalyzer(ctx context.Context, env *Env, mode string, in []byte) ([]byt
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Execute cc_analyzer and get all the targets that need to be built for analyzing files.
|
// Execute cc_analyzer and get all the targets that need to be built for analyzing files.
|
||||||
func getCCTargets(ctx context.Context, env *Env) ([]string, *pb.Status) {
|
func getCCTargets(ctx context.Context, env Env, filePaths []string) ([]string, error) {
|
||||||
state := repoState(env)
|
state, err := proto.Marshal(repoState(env, filePaths))
|
||||||
bytes, err := proto.Marshal(state)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalln("Failed to serialize state:", err)
|
log.Fatalln("Failed to serialize state:", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
resp := new(pb.DepsResponse)
|
resp := new(apb.DepsResponse)
|
||||||
result, err := runCCanalyzer(ctx, env, "deps", bytes)
|
result, err := runCCanalyzer(ctx, env, "deps", state)
|
||||||
if marshal_err := proto.Unmarshal(result, resp); marshal_err != nil {
|
if err != nil {
|
||||||
return nil, &pb.Status{
|
return nil, err
|
||||||
Code: pb.Status_FAILURE,
|
|
||||||
Message: proto.String("Malformed response from cc_analyzer: " + marshal_err.Error()),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err := proto.Unmarshal(result, resp); err != nil {
|
||||||
|
return nil, fmt.Errorf("malformed response from cc_analyzer: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
var targets []string
|
var targets []string
|
||||||
if resp.Status != nil && resp.Status.Code != pb.Status_OK {
|
if resp.Status != nil && resp.Status.Code != apb.Status_OK {
|
||||||
return targets, resp.Status
|
return targets, fmt.Errorf("cc_analyzer failed: %v", resp.Status.Message)
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, deps := range resp.Deps {
|
for _, deps := range resp.Deps {
|
||||||
targets = append(targets, deps.BuildTarget...)
|
targets = append(targets, deps.BuildTarget...)
|
||||||
}
|
}
|
||||||
|
return targets, nil
|
||||||
status := &pb.Status{Code: pb.Status_OK}
|
|
||||||
if err != nil {
|
|
||||||
status = &pb.Status{
|
|
||||||
Code: pb.Status_FAILURE,
|
|
||||||
Message: proto.String(err.Error()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return targets, status
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func getCCInputs(ctx context.Context, env *Env) *pb.IdeAnalysis {
|
func getCCInputs(ctx context.Context, env Env, filePaths []string) ([]*pb.AnalysisResult, []*pb.BuildableUnit, error) {
|
||||||
state := repoState(env)
|
state, err := proto.Marshal(repoState(env, filePaths))
|
||||||
bytes, err := proto.Marshal(state)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalln("Failed to serialize state:", err)
|
log.Fatalln("Failed to serialize state:", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
resp := new(pb.IdeAnalysis)
|
resp := new(apb.IdeAnalysis)
|
||||||
result, err := runCCanalyzer(ctx, env, "inputs", bytes)
|
result, err := runCCanalyzer(ctx, env, "inputs", state)
|
||||||
if marshal_err := proto.Unmarshal(result, resp); marshal_err != nil {
|
|
||||||
resp.Status = &pb.Status{
|
|
||||||
Code: pb.Status_FAILURE,
|
|
||||||
Message: proto.String("Malformed response from cc_analyzer: " + marshal_err.Error()),
|
|
||||||
}
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil && (resp.Status == nil || resp.Status.Code == pb.Status_OK) {
|
|
||||||
resp.Status = &pb.Status{
|
|
||||||
Code: pb.Status_FAILURE,
|
|
||||||
Message: proto.String(err.Error()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
func getJavaTargets(javaFileToModuleMap map[string]*javaModule) []string {
|
|
||||||
var targets []string
|
|
||||||
for _, m := range javaFileToModuleMap {
|
|
||||||
targets = append(targets, m.Name)
|
|
||||||
}
|
|
||||||
return targets
|
|
||||||
}
|
|
||||||
|
|
||||||
func getJavaInputs(env *Env, javaModules map[string]*javaModule, javaFileToModuleMap map[string]*javaModule) *pb.IdeAnalysis {
|
|
||||||
var sources []*pb.SourceFile
|
|
||||||
type depsAndGenerated struct {
|
|
||||||
Deps []string
|
|
||||||
Generated []*pb.GeneratedFile
|
|
||||||
}
|
|
||||||
moduleToDeps := make(map[string]*depsAndGenerated)
|
|
||||||
for _, f := range env.JavaFiles {
|
|
||||||
file := &pb.SourceFile{
|
|
||||||
Path: f,
|
|
||||||
}
|
|
||||||
sources = append(sources, file)
|
|
||||||
|
|
||||||
m := javaFileToModuleMap[f]
|
|
||||||
if m == nil {
|
|
||||||
file.Status = &pb.Status{
|
|
||||||
Code: pb.Status_FAILURE,
|
|
||||||
Message: proto.String("File not found in any module."),
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
file.Status = &pb.Status{Code: pb.Status_OK}
|
|
||||||
if moduleToDeps[m.Name] != nil {
|
|
||||||
file.Generated = moduleToDeps[m.Name].Generated
|
|
||||||
file.Deps = moduleToDeps[m.Name].Deps
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
deps := transitiveDeps(m, javaModules)
|
|
||||||
var generated []*pb.GeneratedFile
|
|
||||||
outPrefix := env.OutDir + "/"
|
|
||||||
for _, d := range deps {
|
|
||||||
if relPath, ok := strings.CutPrefix(d, outPrefix); ok {
|
|
||||||
contents, err := os.ReadFile(d)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Printf("Generated file %q not found - will be skipped.\n", d)
|
return nil, nil, fmt.Errorf("cc_analyzer failed: %v", err)
|
||||||
|
}
|
||||||
|
if err := proto.Unmarshal(result, resp); err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("malformed response from cc_analyzer: %v", err)
|
||||||
|
}
|
||||||
|
if resp.Status != nil && resp.Status.Code != apb.Status_OK {
|
||||||
|
return nil, nil, fmt.Errorf("cc_analyzer failed: %v", resp.Status.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
var results []*pb.AnalysisResult
|
||||||
|
var units []*pb.BuildableUnit
|
||||||
|
for _, s := range resp.Sources {
|
||||||
|
status := &pb.AnalysisResult_Status{
|
||||||
|
Code: pb.AnalysisResult_Status_CODE_OK,
|
||||||
|
}
|
||||||
|
if s.GetStatus().GetCode() != apb.Status_OK {
|
||||||
|
status.Code = pb.AnalysisResult_Status_CODE_BUILD_FAILED
|
||||||
|
status.StatusMessage = proto.String(s.GetStatus().GetMessage())
|
||||||
|
}
|
||||||
|
|
||||||
|
result := &pb.AnalysisResult{
|
||||||
|
SourceFilePath: s.GetPath(),
|
||||||
|
UnitId: s.GetPath(),
|
||||||
|
Status: status,
|
||||||
|
}
|
||||||
|
results = append(results, result)
|
||||||
|
|
||||||
|
var generated []*pb.GeneratedFile
|
||||||
|
for _, f := range s.Generated {
|
||||||
|
generated = append(generated, &pb.GeneratedFile{
|
||||||
|
Path: f.GetPath(),
|
||||||
|
Contents: f.GetContents(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
genUnit := &pb.BuildableUnit{
|
||||||
|
Id: "genfiles_for_" + s.GetPath(),
|
||||||
|
SourceFilePaths: s.GetDeps(),
|
||||||
|
GeneratedFiles: generated,
|
||||||
|
}
|
||||||
|
|
||||||
|
unit := &pb.BuildableUnit{
|
||||||
|
Id: s.GetPath(),
|
||||||
|
Language: pb.Language_LANGUAGE_CPP,
|
||||||
|
SourceFilePaths: []string{s.GetPath()},
|
||||||
|
CompilerArguments: s.GetCompilerArguments(),
|
||||||
|
DependencyIds: []string{genUnit.GetId()},
|
||||||
|
}
|
||||||
|
units = append(units, unit, genUnit)
|
||||||
|
}
|
||||||
|
return results, units, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// findJavaModules tries to find the modules that cover the given file paths.
|
||||||
|
// If a file is covered by multiple modules, the first module is returned.
|
||||||
|
func findJavaModules(paths []string, modules map[string]*javaModule) map[string]string {
|
||||||
|
ret := make(map[string]string)
|
||||||
|
for name, module := range modules {
|
||||||
|
if strings.HasSuffix(name, ".impl") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
generated = append(generated, &pb.GeneratedFile{
|
for i, p := range paths {
|
||||||
|
if slices.Contains(module.Srcs, p) {
|
||||||
|
ret[p] = name
|
||||||
|
paths = append(paths[:i], paths[i+1:]...)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(paths) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
func getJavaInputs(env Env, modulesByPath map[string]string, modules map[string]*javaModule) ([]*pb.AnalysisResult, []*pb.BuildableUnit) {
|
||||||
|
var results []*pb.AnalysisResult
|
||||||
|
unitsById := make(map[string]*pb.BuildableUnit)
|
||||||
|
for p, moduleName := range modulesByPath {
|
||||||
|
r := &pb.AnalysisResult{
|
||||||
|
SourceFilePath: p,
|
||||||
|
}
|
||||||
|
results = append(results, r)
|
||||||
|
|
||||||
|
m := modules[moduleName]
|
||||||
|
if m == nil {
|
||||||
|
r.Status = &pb.AnalysisResult_Status{
|
||||||
|
Code: pb.AnalysisResult_Status_CODE_NOT_FOUND,
|
||||||
|
StatusMessage: proto.String("File not found in any module."),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
r.UnitId = moduleName
|
||||||
|
r.Status = &pb.AnalysisResult_Status{Code: pb.AnalysisResult_Status_CODE_OK}
|
||||||
|
if unitsById[r.UnitId] != nil {
|
||||||
|
// File is covered by an already created unit.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
u := &pb.BuildableUnit{
|
||||||
|
Id: moduleName,
|
||||||
|
Language: pb.Language_LANGUAGE_JAVA,
|
||||||
|
SourceFilePaths: m.Srcs,
|
||||||
|
}
|
||||||
|
unitsById[u.Id] = u
|
||||||
|
|
||||||
|
q := list.New()
|
||||||
|
for _, d := range m.Deps {
|
||||||
|
q.PushBack(d)
|
||||||
|
}
|
||||||
|
for q.Len() > 0 {
|
||||||
|
name := q.Remove(q.Front()).(string)
|
||||||
|
mod := modules[name]
|
||||||
|
if mod == nil || unitsById[name] != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var paths []string
|
||||||
|
paths = append(paths, mod.Srcs...)
|
||||||
|
paths = append(paths, mod.SrcJars...)
|
||||||
|
paths = append(paths, mod.Jars...)
|
||||||
|
unitsById[name] = &pb.BuildableUnit{
|
||||||
|
Id: name,
|
||||||
|
SourceFilePaths: mod.Srcs,
|
||||||
|
GeneratedFiles: genFiles(env, paths),
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, d := range mod.Deps {
|
||||||
|
q.PushBack(d)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
units := make([]*pb.BuildableUnit, 0, len(unitsById))
|
||||||
|
for _, u := range unitsById {
|
||||||
|
units = append(units, u)
|
||||||
|
}
|
||||||
|
return results, units
|
||||||
|
}
|
||||||
|
|
||||||
|
// genFiles returns the generated files (paths that start with outDir/) for the
|
||||||
|
// given paths. Generated files that do not exist are ignored.
|
||||||
|
func genFiles(env Env, paths []string) []*pb.GeneratedFile {
|
||||||
|
prefix := env.OutDir + "/"
|
||||||
|
var ret []*pb.GeneratedFile
|
||||||
|
for _, p := range paths {
|
||||||
|
relPath, ok := strings.CutPrefix(p, prefix)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
contents, err := os.ReadFile(path.Join(env.RepoDir, p))
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = append(ret, &pb.GeneratedFile{
|
||||||
Path: relPath,
|
Path: relPath,
|
||||||
Contents: contents,
|
Contents: contents,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
return ret
|
||||||
moduleToDeps[m.Name] = &depsAndGenerated{deps, generated}
|
|
||||||
file.Generated = generated
|
|
||||||
file.Deps = deps
|
|
||||||
}
|
|
||||||
return &pb.IdeAnalysis{
|
|
||||||
Sources: sources,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// runMake runs Soong build for the given modules.
|
// runMake runs Soong build for the given modules.
|
||||||
@@ -308,6 +411,7 @@ func runMake(ctx context.Context, env Env, modules ...string) error {
|
|||||||
"TARGET_PRODUCT=" + env.LunchTarget.Product,
|
"TARGET_PRODUCT=" + env.LunchTarget.Product,
|
||||||
"TARGET_RELEASE=" + env.LunchTarget.Release,
|
"TARGET_RELEASE=" + env.LunchTarget.Release,
|
||||||
"TARGET_BUILD_VARIANT=" + env.LunchTarget.Variant,
|
"TARGET_BUILD_VARIANT=" + env.LunchTarget.Variant,
|
||||||
|
"TARGET_BUILD_TYPE=release",
|
||||||
"-k",
|
"-k",
|
||||||
}
|
}
|
||||||
args = append(args, modules...)
|
args = append(args, modules...)
|
||||||
@@ -319,7 +423,6 @@ func runMake(ctx context.Context, env Env, modules ...string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type javaModule struct {
|
type javaModule struct {
|
||||||
Name string
|
|
||||||
Path []string `json:"path,omitempty"`
|
Path []string `json:"path,omitempty"`
|
||||||
Deps []string `json:"dependencies,omitempty"`
|
Deps []string `json:"dependencies,omitempty"`
|
||||||
Srcs []string `json:"srcs,omitempty"`
|
Srcs []string `json:"srcs,omitempty"`
|
||||||
@@ -327,66 +430,23 @@ type javaModule struct {
|
|||||||
SrcJars []string `json:"srcjars,omitempty"`
|
SrcJars []string `json:"srcjars,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadJavaModules(env *Env) (map[string]*javaModule, map[string]*javaModule, error) {
|
func loadJavaModules(env Env) (map[string]*javaModule, error) {
|
||||||
javaDepsPath := path.Join(env.RepoDir, env.OutDir, "soong/module_bp_java_deps.json")
|
javaDepsPath := path.Join(env.RepoDir, env.OutDir, "soong/module_bp_java_deps.json")
|
||||||
data, err := os.ReadFile(javaDepsPath)
|
data, err := os.ReadFile(javaDepsPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var moduleMapping map[string]*javaModule // module name -> module
|
var ret map[string]*javaModule // module name -> module
|
||||||
if err = json.Unmarshal(data, &moduleMapping); err != nil {
|
if err = json.Unmarshal(data, &ret); err != nil {
|
||||||
return nil, nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
javaModules := make(map[string]*javaModule)
|
// Add top level java_sdk_library for .impl modules.
|
||||||
javaFileToModuleMap := make(map[string]*javaModule)
|
for name, module := range ret {
|
||||||
for name, module := range moduleMapping {
|
if stripped := strings.TrimSuffix(name, ".impl"); stripped != name {
|
||||||
if strings.HasSuffix(name, "-jarjar") || strings.HasSuffix(name, ".impl") {
|
ret[stripped] = module
|
||||||
continue
|
|
||||||
}
|
|
||||||
module.Name = name
|
|
||||||
javaModules[name] = module
|
|
||||||
for _, src := range module.Srcs {
|
|
||||||
if !slices.Contains(env.JavaFiles, src) {
|
|
||||||
// We are only interested in active files.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if javaFileToModuleMap[src] != nil {
|
|
||||||
// TODO(michaelmerg): Handle the case where a file is covered by multiple modules.
|
|
||||||
log.Printf("File %q found in module %q but is already covered by module %q", src, module.Name, javaFileToModuleMap[src].Name)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
javaFileToModuleMap[src] = module
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return javaModules, javaFileToModuleMap, nil
|
return ret, nil
|
||||||
}
|
|
||||||
|
|
||||||
func transitiveDeps(m *javaModule, modules map[string]*javaModule) []string {
|
|
||||||
var ret []string
|
|
||||||
q := list.New()
|
|
||||||
q.PushBack(m.Name)
|
|
||||||
seen := make(map[string]bool) // module names -> true
|
|
||||||
for q.Len() > 0 {
|
|
||||||
name := q.Remove(q.Front()).(string)
|
|
||||||
mod := modules[name]
|
|
||||||
if mod == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
ret = append(ret, mod.Srcs...)
|
|
||||||
ret = append(ret, mod.SrcJars...)
|
|
||||||
ret = append(ret, mod.Jars...)
|
|
||||||
for _, d := range mod.Deps {
|
|
||||||
if seen[d] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[d] = true
|
|
||||||
q.PushBack(d)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
slices.Sort(ret)
|
|
||||||
ret = slices.Compact(ret)
|
|
||||||
return ret
|
|
||||||
}
|
}
|
||||||
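
For context on the loadJavaModules/javaModule changes above, here is a minimal, hypothetical sketch of how entries in out/soong/module_bp_java_deps.json map onto the javaModule struct through its JSON tags. The module name, the file paths, and the "jars" tag on the Jars field are assumptions for illustration; only the overall struct shape comes from this diff.

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Same field layout and JSON tags as the javaModule struct in the diff above;
// the Jars field (used elsewhere in the script) is assumed to use a "jars" tag.
type javaModule struct {
	Path    []string `json:"path,omitempty"`
	Deps    []string `json:"dependencies,omitempty"`
	Srcs    []string `json:"srcs,omitempty"`
	Jars    []string `json:"jars,omitempty"`
	SrcJars []string `json:"srcjars,omitempty"`
}

func main() {
	// Hypothetical excerpt of out/soong/module_bp_java_deps.json
	// (module name -> module); real entries come from the Soong build.
	data := []byte(`{
		"example-lib": {
			"path": ["frameworks/example"],
			"dependencies": ["example-proto"],
			"srcs": ["frameworks/example/src/Example.java"]
		}
	}`)

	var modules map[string]*javaModule
	if err := json.Unmarshal(data, &modules); err != nil {
		log.Fatalf("Failed to parse module graph: %v", err)
	}
	for name, m := range modules {
		fmt.Printf("%s: srcs=%v deps=%v\n", name, m.Srcs, m.Deps)
	}
}

As the diff shows, loadJavaModules additionally aliases each "<name>.impl" entry to "<name>" so that top-level java_sdk_library modules resolve.
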
|
@@ -32,6 +32,7 @@ case $(uname -s) in
|
|||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
|
export BUILD_ENV_SEQUENCE_NUMBER=13
|
||||||
export ANDROID_BUILD_TOP=$TOP
|
export ANDROID_BUILD_TOP=$TOP
|
||||||
export OUT_DIR=${OUT_DIR}
|
export OUT_DIR=${OUT_DIR}
|
||||||
exec "${PREBUILTS_GO_ROOT}/bin/go" "run" "ide_query" "$@"
|
exec "${PREBUILTS_GO_ROOT}/bin/go" "run" "ide_query" "$@"
|
||||||
|
File diff suppressed because it is too large
@@ -16,51 +16,11 @@
|
|||||||
syntax = "proto3";
|
syntax = "proto3";
|
||||||
|
|
||||||
package ide_query;
|
package ide_query;
|
||||||
|
|
||||||
option go_package = "ide_query/ide_query_proto";
|
option go_package = "ide_query/ide_query_proto";
|
||||||
|
|
||||||
// Indicates the success/failure for analysis.
|
|
||||||
message Status {
|
|
||||||
enum Code {
|
|
||||||
OK = 0;
|
|
||||||
FAILURE = 1;
|
|
||||||
}
|
|
||||||
Code code = 1;
|
|
||||||
// Details about the status, might be displayed to user.
|
|
||||||
optional string message = 2;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Represents an Android checkout on user's workstation.
|
|
||||||
message RepoState {
|
|
||||||
// Absolute path for the checkout in the workstation.
|
|
||||||
// e.g. /home/user/work/android/
|
|
||||||
string repo_dir = 1;
|
|
||||||
// Relative to repo_dir.
|
|
||||||
repeated string active_file_path = 2;
|
|
||||||
// Repository relative path to output directory in workstation.
|
|
||||||
string out_dir = 3;
|
|
||||||
// Repository relative path to compile_commands.json in workstation.
|
|
||||||
string comp_db_path = 4;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Provides all the targets that are prerequisites for running language
|
|
||||||
// services on active_file_paths.
|
|
||||||
message DepsResponse {
|
|
||||||
// Build dependencies of a source file for providing language services.
|
|
||||||
message Deps {
|
|
||||||
// Relative to repo_dir.
|
|
||||||
string source_file = 1;
|
|
||||||
// Build target to execute for generating dep.
|
|
||||||
repeated string build_target = 2;
|
|
||||||
optional Status status = 3;
|
|
||||||
}
|
|
||||||
repeated Deps deps = 1;
|
|
||||||
optional Status status = 2;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns all the information necessary for providing language services for the
|
|
||||||
// active files.
|
|
||||||
message GeneratedFile {
|
message GeneratedFile {
|
||||||
// Path to the file relative to IdeAnalysis.build_artifact_root.
|
// Path to the file relative to build_out_dir.
|
||||||
string path = 1;
|
string path = 1;
|
||||||
|
|
||||||
// The text of the generated file, if not provided contents will be read
|
// The text of the generated file, if not provided contents will be read
|
||||||
@@ -68,44 +28,100 @@ message GeneratedFile {
|
|||||||
optional bytes contents = 2;
|
optional bytes contents = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SourceFile {
|
|
||||||
// Path to the source file relative to repository root.
|
|
||||||
string path = 1;
|
|
||||||
|
|
||||||
// Working directory used by the build system. All the relative
|
|
||||||
// paths in compiler_arguments should be relative to this path.
|
|
||||||
// Relative to repository root.
|
|
||||||
string working_dir = 2;
|
|
||||||
|
|
||||||
// Compiler arguments to compile the source file. If multiple variants
|
|
||||||
// of the module being compiled are possible, the query script will choose
|
|
||||||
// one.
|
|
||||||
repeated string compiler_arguments = 3;
|
|
||||||
|
|
||||||
// Any generated files that are used in compiling the file.
|
|
||||||
repeated GeneratedFile generated = 4;
|
|
||||||
|
|
||||||
// Paths to all of the sources, like build files, code generators,
|
|
||||||
// proto files etc. that were used during analysis. Used to figure
|
|
||||||
// out when a set of build artifacts are stale and the query tool
|
|
||||||
// must be re-run.
|
|
||||||
// Relative to repository root.
|
|
||||||
repeated string deps = 5;
|
|
||||||
|
|
||||||
// Represents analysis status for this particular file. e.g. not part
|
|
||||||
// of the build graph.
|
|
||||||
optional Status status = 6;
|
|
||||||
}
|
|
||||||
|
|
||||||
message IdeAnalysis {
|
message IdeAnalysis {
|
||||||
// Path relative to repository root, containing all the artifacts
|
// Directory that contains build outputs generated by the build system.
|
||||||
// generated by the build system. GeneratedFile.path are always
|
// Relative to repository root.
|
||||||
// relative to this directory.
|
string build_out_dir = 1;
|
||||||
string build_artifact_root = 1;
|
// Working directory used by the build system.
|
||||||
|
// Relative to repository root.
|
||||||
|
string working_dir = 4;
|
||||||
|
// Only set if the whole query failed.
|
||||||
|
optional AnalysisError error = 5;
|
||||||
|
// List of results, one per queried file.
|
||||||
|
repeated AnalysisResult results = 6;
|
||||||
|
// List of buildable units directly or indirectly referenced by the results.
|
||||||
|
repeated BuildableUnit units = 7;
|
||||||
|
|
||||||
repeated SourceFile sources = 2;
|
reserved 2, 3;
|
||||||
|
}
|
||||||
// Status representing overall analysis.
|
|
||||||
// Should fail only when no analysis can be performed.
|
message AnalysisError {
|
||||||
optional Status status = 3;
|
// Human readable error message.
|
||||||
|
string error_message = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
message AnalysisResult {
|
||||||
|
// Path to the source file that was queried, relative to repository root.
|
||||||
|
string source_file_path = 1;
|
||||||
|
// Indicates the success/failure for the query.
|
||||||
|
message Status {
|
||||||
|
enum Code {
|
||||||
|
CODE_UNSPECIFIED = 0;
|
||||||
|
CODE_OK = 1;
|
||||||
|
CODE_NOT_FOUND = 2; // no target or module found for the source file.
|
||||||
|
CODE_BUILD_FAILED = 3;
|
||||||
|
}
|
||||||
|
Code code = 1;
|
||||||
|
// Details about the status, might be displayed to user.
|
||||||
|
optional string status_message = 2;
|
||||||
|
}
|
||||||
|
// Represents status for this result. e.g. not part of the build graph.
|
||||||
|
Status status = 2;
|
||||||
|
// ID of buildable unit that contains the source file.
|
||||||
|
// The ide_query script can choose the most relevant unit from multiple
|
||||||
|
// options.
|
||||||
|
string unit_id = 3;
|
||||||
|
// Invalidation rule to check if the result is still valid.
|
||||||
|
Invalidation invalidation = 4;
|
||||||
|
}
|
||||||
|
|
||||||
|
enum Language {
|
||||||
|
LANGUAGE_UNSPECIFIED = 0;
|
||||||
|
LANGUAGE_JAVA = 1; // also includes Kotlin
|
||||||
|
LANGUAGE_CPP = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
message BuildableUnit {
|
||||||
|
// Unique identifier of the buildable unit.
|
||||||
|
//
|
||||||
|
// Examples:
|
||||||
|
// - Java: module or target name, e.g. "framework-bluetooth" or
|
||||||
|
// "//third_party/hamcrest:hamcrest_java"
|
||||||
|
// - C++: source file, e.g. "path/to/file.cc"
|
||||||
|
string id = 1;
|
||||||
|
// Language of the unit.
|
||||||
|
// Required for buildable units directly referenced by the AnalysisResult,
|
||||||
|
// e.g. the unit associated with the compilation stage for the source file.
|
||||||
|
Language language = 2;
|
||||||
|
// Source files that are part of this unit.
|
||||||
|
// Path to the file relative to working_dir.
|
||||||
|
repeated string source_file_paths = 3;
|
||||||
|
// Compiler arguments to compile the source files.
|
||||||
|
repeated string compiler_arguments = 4;
|
||||||
|
// List of generated files produced by this unit.
|
||||||
|
repeated GeneratedFile generated_files = 5;
|
||||||
|
// List of other BuildableUnits this unit depends on.
|
||||||
|
repeated string dependency_ids = 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Invalidation rule to check if the result is still valid.
|
||||||
|
// This should contain files/dirs that are not directly part of the build graph
|
||||||
|
// but still affect the result. For example BUILD files, the toolchain
|
||||||
|
// directory, or config files, etc.
|
||||||
|
message Invalidation {
|
||||||
|
// If any of these files change the result may become invalid.
|
||||||
|
// Path to the file relative to repository root.
|
||||||
|
repeated string file_paths = 1;
|
||||||
|
|
||||||
|
message Wildcard {
|
||||||
|
// Prefix of the file path (e.g. "path/to/")
|
||||||
|
optional string prefix = 1;
|
||||||
|
// Suffix of the file path (e.g. "Android.bp")
|
||||||
|
optional string suffix = 2;
|
||||||
|
// If false, the part of the path between the given `prefix` and `suffix`
|
||||||
|
// should not contain directory separators ('/').
|
||||||
|
optional bool can_cross_folder = 3;
|
||||||
|
}
|
||||||
|
// If any of these rules match a changed file the result may become invalid.
|
||||||
|
repeated Wildcard wildcards = 4;
|
||||||
}
|
}
|
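
As a rough illustration of the result/unit split defined above, the following sketch assembles an IdeAnalysis for one C++ source the way getCCInputs does: an AnalysisResult pointing at its compilation unit, plus a second unit that only carries generated files. The import path follows the go_package option above and the pb alias matches the script; the file names, compiler flags, and generated paths are made-up placeholders, not output of the real tool.

package main

import (
	"fmt"
	"log"

	"google.golang.org/protobuf/encoding/prototext"

	pb "ide_query/ide_query_proto"
)

func main() {
	src := "frameworks/example/foo.cc" // hypothetical active file
	analysis := &pb.IdeAnalysis{
		BuildOutDir: "out",
		Results: []*pb.AnalysisResult{{
			SourceFilePath: src,
			Status:         &pb.AnalysisResult_Status{Code: pb.AnalysisResult_Status_CODE_OK},
			UnitId:         src,
		}},
		Units: []*pb.BuildableUnit{
			{
				// Unit holding the compile command for the queried file.
				Id:                src,
				Language:          pb.Language_LANGUAGE_CPP,
				SourceFilePaths:   []string{src},
				CompilerArguments: []string{"-Iout/soong/.intermediates/example", "-std=c++20"},
				DependencyIds:     []string{"genfiles_for_" + src},
			},
			{
				// Companion unit carrying the generated headers the file depends on.
				Id:              "genfiles_for_" + src,
				SourceFilePaths: []string{"frameworks/example/foo.proto"},
				GeneratedFiles: []*pb.GeneratedFile{{
					Path: "soong/.intermediates/example/foo.pb.h",
				}},
			},
		},
	}
	out, err := prototext.MarshalOptions{Multiline: true}.Marshal(analysis)
	if err != nil {
		log.Fatalf("Failed to print analysis: %v", err)
	}
	fmt.Println(string(out))
}

Printing with prototext here is only for inspection; the script itself writes the binary proto.Marshal output, as shown in the main() diff above.
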