2 changes: 1 addition & 1 deletion .bazelrc
@@ -12,7 +12,7 @@
# *******************************************************************************

common --@score_baselibs//score/memory/shared/flags:use_typedshmd=False
common --@score-baselibs//score/json:base_library=nlohmann
common --@score_baselibs//score/json:base_library=nlohmann


common --//score/datarouter/build_configuration_flags:persistent_logging=False
4 changes: 2 additions & 2 deletions MODULE.bazel
@@ -17,6 +17,7 @@ module(
)

bazel_dep(name = "platforms", version = "1.0.0")

bazel_dep(name = "score_toolchains_gcc", version = "0.4", dev_dependency = True)

bazel_dep(name = "googletest", version = "1.17.0.bcr.1")
@@ -94,7 +95,6 @@ download_archive(

# C/C++ rules for Bazel
bazel_dep(name = "rules_cc", version = "0.2.1")

bazel_dep(name = "nlohmann_json", version = "3.11.3")
bazel_dep(name = "bazel_skylib", version = "1.7.1")
bazel_dep(name = "rules_doxygen", version = "2.5.0")
@@ -160,4 +160,4 @@ bazel_dep(name = "aspect_rules_lint", version = "1.5.3")
bazel_dep(name = "buildifier_prebuilt", version = "8.2.0.2")

#docs-as-code
bazel_dep(name = "score_docs_as_code", version = "1.1.0")
bazel_dep(name = "score_docs_as_code", version = "2.0.2")
8 changes: 4 additions & 4 deletions score/datarouter/BUILD
@@ -296,11 +296,11 @@ filegroup(
"etc/persistent-logging.json",
],
visibility = [
"//ecu/xpad/xpad-shared/config/common/pas/datarouter:__subpackages__",
"//ecu/xyz/xyz-shared/config/common/pas/datarouter:__subpackages__",
"//score/datarouter/test:__subpackages__",
"//platform/aas/tools/itf:__subpackages__",
"//platform/aas/tools/sctf:__subpackages__",
# "@ddad//ecu/xpad/xpad-shared/config/common/pas/datarouter:__subpackages__",
# "@ddad//ecu/xyz/xyz-shared/config/common/pas/datarouter:__subpackages__",
],
)

@@ -762,10 +762,10 @@ cc_binary(
],
features = COMPILER_WARNING_FEATURES,
visibility = [
"//ecu/xpad/xpad-shared/config/common/pas/datarouter:__subpackages__",
"//ecu/xyz/xyz-shared/config/common/pas/datarouter:__subpackages__",
"//platform/aas/tools/itf:__subpackages__",
"//platform/aas/tools/sctf:__subpackages__",
# "@ddad//ecu/xpad/xpad-shared/config/common/pas/datarouter:__subpackages__",
# "@ddad//ecu/xyz/xyz-shared/config/common/pas/datarouter:__subpackages__",
],
deps = [
":datarouter_app",
2 changes: 1 addition & 1 deletion score/datarouter/doc/guideline/file_based_local.md
@@ -2,7 +2,7 @@

Sometimes you want the log written to the filesystem. This should work whether you run under QEMU or natively. Although it can only be configured at the application level, it can bring significant benefits for offline analysis.

1. take a look at the following document regarding configuration of logging: [xpad doc](broken_link_g/xpad/documentation/blob/master/guidelines/logging/configuration.md)
1. take a look at the following document regarding configuration of logging: [xyz doc](broken_link_g/xyz/documentation/blob/master/guidelines/logging/configuration.md)

The following config will set App1 to write into /tmp/App1.dlt

14 changes: 6 additions & 8 deletions third_party/host_llvm/host_llvm.MODULE.bazel
@@ -1,11 +1,7 @@
bazel_dep(name = "toolchains_llvm", version = "1.5.0", dev_dependency=True)

llvm = use_extension("@toolchains_llvm//toolchain/extensions:llvm.bzl", "llvm", dev_dependency=True)
bazel_dep(name = "toolchains_llvm", version = "1.5.0", dev_dependency = True)

llvm = use_extension("@toolchains_llvm//toolchain/extensions:llvm.bzl", "llvm", dev_dependency = True)
llvm.toolchain(
llvm_version = "16.0.0",
cxx_standard = {"": "c++17"},
stdlib ={"": "dynamic-stdc++"},
compile_flags = {"": [
"-march=nehalem",
"-ffp-model=strict",
@@ -19,9 +15,11 @@ llvm.toolchain(
"-Wno-error=self-assign-overloaded",
"-Wthread-safety",
]},
link_libs ={"":[
cxx_standard = {"": "c++17"},
link_libs = {"": [
"-lrt",
]},
llvm_version = "16.0.0",
stdlib = {"": "dynamic-stdc++"},
)

use_repo(llvm, "llvm_toolchain")
@@ -19,11 +19,13 @@
import subprocess
from pathlib import Path


def get_github_repo() -> str:
git_root = find_git_root()
repo = get_github_repo_info(git_root)
return repo


def parse_git_output(str_line: str) -> str:
if len(str_line.split()) < 2:
logger.warning(
@@ -39,20 +39,23 @@
]

lobster_code_template = {
"data": [],
"generator": "lobster_cpp",
"schema": "lobster-imp-trace",
"version": 3
}
"data": [],
"generator": "lobster_cpp",
"schema": "lobster-imp-trace",
"version": 3,
}

lobster_reqs_template = {
"data": [],
"generator": "lobster-trlc",
"schema": "lobster-req-trace",
"version": 4
}
"data": [],
"generator": "lobster-trlc",
"schema": "lobster-req-trace",
"version": 4,
}

def extract_id_from_line(line: str, tags: List[str], nodes: List[str]) -> Optional[Tuple[str, str]]:

def extract_id_from_line(
line: str, tags: List[str], nodes: List[str]
) -> Optional[Tuple[str, str]]:
"""
Parse a single line to extract the ID from tags or nodes.

@@ -90,10 +93,12 @@ def extract_id_from_line(line: str, tags: List[str], nodes: List[str]) -> Option
None
"""
# Step 1: Clean the line of $, \n, and \\n
cleaned_line = line.replace('\n', '').replace('\\n', '')
cleaned_line = line.replace("\n", "").replace("\\n", "")

# Step 2: Remove all single and double quotes
cleaned_line = cleaned_line.replace('"', '').replace("'", '').replace("{",'').strip()
cleaned_line = (
cleaned_line.replace('"', "").replace("'", "").replace("{", "").strip()
)

# Step 3 and 4: Search for tags or nodes and capture the last element
for tag in tags:
@@ -103,16 +108,16 @@ def extract_id_from_line(line: str, tags: List[str], nodes: List[str]) -> Option
for node in nodes:
if cleaned_line.startswith(node):
# Scan for Macro
if node.startswith('$'):
parts = cleaned_line.split(',')
if node.startswith("$"):
parts = cleaned_line.split(",")
if len(parts) >= 2:
return parts[1].strip().rstrip(')'), node
else: # scan for normal plantuml element
return parts[1].strip().rstrip(")"), node
else: # scan for normal plantuml element
# Remove the identifier from the start of the line
parts = cleaned_line[len(node):].strip().split()
parts = cleaned_line[len(node) :].strip().split()

# If there are at least 3 parts and the second-to-last is 'as', return the last part
if len(parts) >= 3 and parts[-2] == 'as':
if len(parts) >= 3 and parts[-2] == "as":
return parts[-1], node

# If there's only one part after the identifier, return it
@@ -121,11 +126,12 @@ def extract_id_from_line(line: str, tags: List[str], nodes: List[str]) -> Option

return None


def extract_tags(
source_file: str,
github_base_url: str,
nodes: List[str],
git_hash_func: Union[Callable[[str], str], None] = get_git_hash
git_hash_func: Union[Callable[[str], str], None] = get_git_hash,
) -> Dict[str, List[Tuple[str, str]]]:
"""
This extracts the file-path, lineNr as well as the git hash of the file
@@ -142,7 +148,9 @@ def extract_tags(
if git_hash_func is None:
git_hash_func = get_git_hash

requirement_mapping: dict[str, List[Tuple[str, str]]] = collections.defaultdict(list)
requirement_mapping: dict[str, List[Tuple[str, str]]] = collections.defaultdict(
list
)
with open(source_file) as f:
hash = git_hash_func(source_file)
for line_number, line in enumerate(f):
@@ -159,11 +167,12 @@

return requirement_mapping


def _extract_tags_dispatch(
source_file: str,
github_base_url: str,
mode=str,
git_hash_func: Union[Callable[[str], str], None] = get_git_hash
git_hash_func: Union[Callable[[str], str], None] = get_git_hash,
) -> Dict[str, List[str]]:
"""
Dispatch to a specialized parser based on file extension.
@@ -176,7 +185,7 @@ def _extract_tags_dispatch(

if mode == "reqs":
foo = lobster_reqs_template
rm = extract_tags(source_file, github_base_url, [], git_hash_func)
rm = extract_tags(source_file, github_base_url, [], git_hash_func)
for id, item in rm.items():
link, node = item[0]
requirement = {
@@ -193,9 +202,9 @@ def _extract_tags_dispatch(
"just_down": [],
"just_global": [],
"framework": "TRLC",
"kind": node.replace('$','').strip(),
"kind": node.replace("$", "").strip(),
"text": f"{id}",
}
}

foo["data"].append(requirement)
else:
@@ -217,16 +226,15 @@ def _extract_tags_dispatch(
"just_up": [],
"just_down": [],
"just_global": [],
"refs": [
f"req {id}"
],
"refs": [f"req {id}"],
"language": "cpp",
"kind": "Function"
}
"kind": "Function",
}
foo["data"].append(codetag)

return foo


if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output")
@@ -253,14 +261,18 @@ def _extract_tags_dispatch(
logger.info(f"Parsing source files: {args.inputs}")

# Finding the GH URL
gh_base_url=f"{args.url}{get_github_repo()}"
gh_base_url = f"{args.url}{get_github_repo()}"

requirement_mappings: Dict[str, List[Tuple[str, str]]] = collections.defaultdict(list)
requirement_mappings: Dict[str, List[Tuple[str, str]]] = collections.defaultdict(
list
)

for input in args.inputs:
with open(input) as f:
for source_file in f:
foo = _extract_tags_dispatch(source_file.strip(), gh_base_url, args.trace)
foo = _extract_tags_dispatch(
source_file.strip(), gh_base_url, args.trace
)

if not foo:
if args.trace == "reqs":
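For orientation, here is a small usage sketch of the extract_id_from_line helper that this diff reformats. It is not part of the change set: the importing module name and the "$requirement" macro are assumptions made purely for illustration, while the expected return values follow from the parsing logic visible above (a PlantUML-style node yields the identifier after "as"; a macro-style node starting with "$" yields its second comma-separated argument).

# Usage sketch only, not part of the diff. The module name "lobster_trace" and the
# "$requirement" macro are illustrative assumptions; the expected results follow from
# the logic of extract_id_from_line shown above.
from lobster_trace import extract_id_from_line

# PlantUML-style node: quotes are stripped, and the token after "as" becomes the ID.
print(extract_id_from_line('component "My Component" as comp1\n', tags=[], nodes=["component"]))
# expected: ('comp1', 'component')

# Macro-style node (starts with "$"): the second comma-separated argument becomes the ID.
print(extract_id_from_line('$requirement("some text", REQ_123)', tags=[], nodes=["$requirement"]))
# expected: ('REQ_123', '$requirement')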