From ba1cec7f93160726fe741fadd48b0ac2df27ebe6 Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 26 Nov 2018 23:31:02 +0800 Subject: [PATCH 01/84] Get started with LSP. Copied files from commit 4af4989213dbac4a1470f7abbd1bbfb48a1e7933. --- .../lsp_graph_server/callgraph/__init__.py | 126 +++ .../lsp_graph_server/callgraph/builder.py | 363 +++++++ .../lsp_graph_server/callgraph/manager.py | 71 ++ persper/analytics/lsp_graph_server/ccls.py | 123 +++ persper/analytics/lsp_graph_server/cquery.py | 111 ++ .../fileparsers/CPP14Lexer.py | 964 ++++++++++++++++++ .../lsp_graph_server/jsonrpcutils.py | 39 + .../languageclient/__init__.py | 0 .../languageclient/lspclient.py | 73 ++ .../languageclient/lspcontract.py | 304 ++++++ .../languageclient/lspserver.py | 202 ++++ persper/analytics/lsp_graph_server/main.py | 86 ++ .../analytics/lsp_graph_server/wildcards.py | 47 + 13 files changed, 2509 insertions(+) create mode 100644 persper/analytics/lsp_graph_server/callgraph/__init__.py create mode 100644 persper/analytics/lsp_graph_server/callgraph/builder.py create mode 100644 persper/analytics/lsp_graph_server/callgraph/manager.py create mode 100644 persper/analytics/lsp_graph_server/ccls.py create mode 100644 persper/analytics/lsp_graph_server/cquery.py create mode 100644 persper/analytics/lsp_graph_server/fileparsers/CPP14Lexer.py create mode 100644 persper/analytics/lsp_graph_server/jsonrpcutils.py create mode 100644 persper/analytics/lsp_graph_server/languageclient/__init__.py create mode 100644 persper/analytics/lsp_graph_server/languageclient/lspclient.py create mode 100644 persper/analytics/lsp_graph_server/languageclient/lspcontract.py create mode 100644 persper/analytics/lsp_graph_server/languageclient/lspserver.py create mode 100644 persper/analytics/lsp_graph_server/main.py create mode 100644 persper/analytics/lsp_graph_server/wildcards.py diff --git a/persper/analytics/lsp_graph_server/callgraph/__init__.py b/persper/analytics/lsp_graph_server/callgraph/__init__.py new 
file mode 100644 index 00000000000..679e5127224 --- /dev/null +++ b/persper/analytics/lsp_graph_server/callgraph/__init__.py @@ -0,0 +1,126 @@ +""" +Basic data structures for call graph. +""" +import logging +from io import IOBase +from pathlib import Path, PurePath +from typing import Dict, Iterable, List, NamedTuple, Tuple, Type, Union + +import jsonpickle + +from languageclient.lspcontract import (DocumentSymbol, Location, Position, + SymbolInformation, SymbolKind, + TextDocument, + TextDocumentContentChangeEvent) + + +_logger = logging.getLogger(__name__) + + +class CallGraphNode(NamedTuple): + name: str + kind: SymbolKind + file: PurePath + pos: Position + length: int + + def __eq__(self, other): + if not isinstance(other, CallGraphNode): + return False + return self.name == other.name and self.file == other.file and self.pos == other.pos and self.length == other.length + + def __hash__(self): + return hash((self.name, self.kind, self.file, self.pos, self.length)) + + +class CallGraphScope(NamedTuple): + name: str + kind: SymbolKind + file: PurePath + startPos: Position + endPos: Position + + def __eq__(self, other): + if not isinstance(other, CallGraphScope): + return False + return self.name == other.name and self.file == other.file and self.startPos == other.startPos \ + and self.endPos == other.endPos + + def __hash__(self): + return hash((self.name, self.kind, self.file, self.startPos, self.endPos)) + + +class CallGraphBranch(NamedTuple): + sourceScope: CallGraphScope + definitionScope: CallGraphScope + sourceToken: CallGraphNode + definitionToken: CallGraphNode + + def __eq__(self, other): + if not isinstance(other, CallGraphBranch): + return False + return self.sourceScope == other.sourceScope and self.definitionScope == other.definitionScope \ + and self.sourceToken == other.sourceToken and self.definitionToken == other.definitionToken + + +class CallGraph(): + + def __init__(self): + self._items = [] + + @property + def items(self): + return 
self._items + + def add(self, branch: CallGraphBranch): + if not branch.sourceScope: + raise ValueError("branch.sourceScope should not be None.") + if not branch.definitionScope: + raise ValueError("branch.definitionScope should not be None.") + self._items.append(branch) + + def clear(self): + self._items.clear() + + def removeBySourceFiles(self, fileNames: Iterable[PurePath]): + if not isinstance(fileNames, set): + fileNames = set(fileNames) + newItems = [i for i in self._items if i.sourceScope.file not in fileNames] + _logger.info("Removed %d branches by %d files.", len(self._items) - len(newItems), len(fileNames)) + self._items = newItems + + def dump(self, file: IOBase): + for i in self._items: + file.write(str(i)) + file.write("\n") + + def dumpTo(self, fileName: str): + with open(fileName, "wt") as f: + self.dump(f) + + def serialize(self, file: IOBase): + for item in self._items: + file.write(jsonpickle.dumps(item, file)) + file.write("\n") + _logger.info("Written %d call graph branches.", len(self._items)) + + def serializeTo(self, fileName): + with open(fileName, "wt") as f: + self.serialize(f) + + def deserialize(self, file: IOBase): + items = [] + for line in file: + line: str = line.strip() + if line: + item = jsonpickle.loads(line) + if not isinstance(item, CallGraphBranch): + raise ValueError("Parsed object [{0}] is not CallGraphBranch.".format(type(item))) + items.append(item) + self._items = items + _logger.info("Loaded %d call graph branches.", len(items)) + assert isinstance(self._items, list) + + def deserializeFrom(self, fileName): + with open(fileName, "rt") as f: + self.deserialize(fileName) diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py new file mode 100644 index 00000000000..5876e55225b --- /dev/null +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -0,0 +1,363 @@ +import logging +import re +import urllib.parse +from abc import ABC, 
abstractclassmethod +from glob import iglob +from os import path +from pathlib import Path, PurePath +from typing import Dict, Iterable, List, Type, Union + +from antlr4 import FileStream, Lexer, Token +from antlr4.error.ErrorListener import ErrorListener +from jsonrpc.exceptions import JsonRpcException + +import wildcards +from languageclient.lspclient import LspClient +from languageclient.lspcontract import (DocumentSymbol, Location, Position, + SymbolInformation, SymbolKind, + TextDocument, + TextDocumentContentChangeEvent) + +from . import CallGraphBranch, CallGraphNode, CallGraphScope + +_logger = logging.getLogger(__name__) + +_KNOWN_EXTENSION_LANGUAGES = { + ".h": "cpp", + ".cpp": "cpp", + ".hpp": "cpp", + ".cc": "cpp", + ".c": "c" +} + + +class TokenizedDocument: + """ + Represents a fully tokenized document that supports finding a symbol or scope from + the specified document position. + """ + + def __init__(self, tokens: Iterable[Token], + documentSymbols: Iterable[Union[DocumentSymbol, SymbolInformation]], fileName: PurePath): + self._tokens = [] + self._scopes = [] + self._fileName = fileName + # cquery returns SymbolInformation, which does not contain the exact position of the defined symbol. 
+ # We just assume symbol is at the first line of the container + # DocumentSymbol + # { (symbolLine, symbolColumn): symbolKind } + # SymbolInformation + # { (symbolLine, symbolName): (containerColumn, symbolKind) } + symbolKinds = {} + for s in documentSymbols: + if isinstance(s, DocumentSymbol): + # We assume selectionRange is exactly the range of symbol name + symbolKinds[s.selectionRange.start.toTuple()] = s.kind + self._scopes.append(CallGraphScope(s.name, s.kind, fileName, s.range.start, s.range.end)) + elif isinstance(s, SymbolInformation): + symbolKinds[(s.location.range.start.line, s.name)] = (s.location.range.start.character, s.kind) + self._scopes.append(CallGraphScope(s.containerName, s.kind, fileName, + s.location.range.start, s.location.range.end)) + else: + _logger.error("Invalid DocumentSymbol in %s: %s", fileName, s) + # put the scopes in document order of start positions, then by the document order of their end positions + self._scopes.sort(key=lambda sc: (sc.startPos, sc.endPos)) + NOT_EXISTS = object() + for t in tokens: + t: Token + assert t.line >= 1 + assert t.column >= 0 + line, col = t.line - 1, t.column + kind = symbolKinds.pop((line, col), NOT_EXISTS) + if kind is NOT_EXISTS: + kind = symbolKinds.get((line, t.text)) + if kind: + containerCol, kind = kind + if containerCol <= col: + # Symbol must be in the container + # e.g. + # |container |symbol |container + # |starts here |starts here |ends here + # v v v + # int main() { ... } + del symbolKinds[(line, t.text)] + else: + kind = None + self._tokens.append(CallGraphNode(t.text, kind, fileName, Position(line, col), t.stop - t.start + 1)) + + @property + def tokens(self): + return self._tokens + + @property + def scopes(self): + return self._scopes + + @property + def fileName(self): + return self._fileName + + def tokenAt(self, line: int, character: int) -> CallGraphNode: + """ + Gets the CallGraphNode from the specified 0-base line and character position + in the document. 
+ """ + L = 0 + R = len(self._tokens) - 1 + pos = Position(line, character) + while L <= R: + M = (L+R)//2 + tokenM: CallGraphNode = self._tokens[M] + # assume there is no \n in token content + endPos = Position(tokenM.pos.line, tokenM.pos.character + tokenM.length) + if endPos <= pos: + L = M + 1 + elif tokenM.pos > pos: + R = M - 1 + else: + return tokenM + return None + + def scopeAt(self, line: int, character: int) -> CallGraphScope: + """ + Gets the CallGraphScope from the specified 0-base line and character position + in the document. + """ + L = 0 + R = len(self._scopes) - 1 + MatchingM = None + pos = Position(line, character) + lastScope = None + # Find the smallest container scope, assume the scopes do not intersect with each other + # (either contains or not contains one another) + for scope in self._scopes: + # This is inefficient (yet correct) + if scope.startPos > pos: + break + if pos < scope.endPos: + if lastScope is None or lastScope.startPos <= scope.startPos <= lastScope.endPos: + lastScope = scope + return lastScope + + +class CallGraphBuilder(ABC): + """ + Building call graph branches from the given files with the specific Lexer and LspClient. 
+ """ + + def __init__(self, lexerType: Type, lspClient: LspClient): + if not issubclass(lexerType, Lexer): + raise TypeError("lexerType should be a subtype of Lexer.") + if not isinstance(lspClient, LspClient): + raise TypeError("lspClient should be an instance of LspClient.") + self._lexerType = lexerType + self._lspClient = lspClient + self._tokenizedDocCache: Dict[str, TokenizedDocument] = {} + self._workspaceFilePatterns: List[str] = None + self._workspaceFilePatternsRegex: list[re.Pattern] = None + + @property + def lspClient(self): + return self._lspClient + + # @lspClient.setter + # def lspClient(self, value: LspClient): + # if not isinstance(value, LspClient): + # raise TypeError("lspClient should be an instance of LspClient.") + # self._lspClient = value + + @property + def workspaceFilePatterns(self) -> List[str]: + """ + A list of `str` containing the glob pattern of workspace files. + When performing goto defintion operations, symbols defined ouside the workspace files + will not be counted in as call graph branch. + """ + return self._workspaceFilePatterns + + @workspaceFilePatterns.setter + def workspaceFilePatterns(self, value: List[str]): + self._workspaceFilePatterns = value + if value: + self._workspaceFilePatternsRegex = [re.compile(wildcards.translate(p)) for p in value] + else: + self._workspaceFilePatternsRegex = None + + def removeDocumentCache(self, path: Union[str, PurePath]): + """ + Remove the lexer cache of a specified document by path. + + path: either be a `str` or a fully resolved `Path` instance. + In the former case, the given path string will be resolved automatically. 
+ """ + if isinstance(path, str): + path = Path(path).resolve() + del self._tokenizedDocCache[path] + + async def getTokenizedDocument(self, path: Union[str, PurePath]): + class MyLexerErrorListener(ErrorListener): + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e): + _logger.warning("%s:%d,%d: %s", path, line, column, msg) + + if isinstance(path, str): + path = Path(path).resolve() + doc = self._tokenizedDocCache.get(path) + if doc: + return doc + textDoc = TextDocument.loadFile(path, self.inferLanguageId(path)) + input = FileStream(path, encoding="utf-8", errors="replace") + lexer = self._lexerType(input) + assert isinstance(lexer, Lexer) + lexer.removeErrorListeners() + lexer.addErrorListener(MyLexerErrorListener()) + documentSymbols = [] + if await self.openDocument(textDoc): + try: + documentSymbols = await self._lspClient.server.textDocumentGetSymbols(textDoc.uri) + finally: + # _logger.info("Close doc") + self._lspClient.server.textDocumentDidClose(textDoc.uri) + + def tokenGenerator(): + while True: + tk = lexer.nextToken() + if tk.type == Token.EOF: + return + if self.filterToken(tk): + yield tk + doc = TokenizedDocument(tokenGenerator(), documentSymbols, path) + self._tokenizedDocCache[path] = doc + return doc + + def pathFromUri(self, expr: str) -> Path: + expr: str = urllib.parse.unquote(expr).strip() + if expr.lower().startswith("file:///"): + expr = expr[8:] + return Path(expr).resolve() + + @abstractclassmethod + def filterToken(self, token: Token) -> bool: + """ + When overridden in the derived class, determines whether the given token + has the need to perform goto definition LSP invocations on. + """ + raise NotImplementedError + + def inferLanguageId(self, path: PurePath) -> str: + """ + Infers the language ID for the given document path. 
+ """ + ext = path.suffix.lower() + return _KNOWN_EXTENSION_LANGUAGES[ext] + + async def openDocument(self, textDoc: TextDocument): + """ + Opens the specified text document, notifying the LSP server. + """ + self._lspClient.server.textDocumentDidOpen(textDoc) + + async def closeDocument(self, uri: str): + """ + Closes the specified text document, notifying the LSP server. + + uri: URI of the text document. + """ + self._lspClient.server.textDocumentDidClose(uri) + + async def buildCallGraphInFiles(self, globPattern: Union[str, Iterable[str]] = None): + """ + Build call graph branches asynchronously in files matching the specified glob pattern(s). + """ + if not globPattern: + if not self._workspaceFilePatterns: + raise ValueError("globPattern is required if workspaceFilePatterns is not available.") + globPattern = self._workspaceFilePatterns[0] + if isinstance(globPattern, str): + globPattern = [globPattern] + visitedPaths = set() + for pattern in globPattern: + for fileName in iglob(pattern, recursive=True): + if not path.isfile(fileName): + continue + if fileName in visitedPaths: + continue + visitedPaths.add(fileName) + async for node in self.buildCallGraphInFile(fileName): + yield node + + async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch]: + """ + Build call graph branches asynchronously in the specified file. + """ + srcPath = self.pathFromUri(fileName) + _logger.info("Build call graph in: %s", srcPath) + counter = 0 + thisDoc = await self.getTokenizedDocument(srcPath) + textDoc = TextDocument.loadFile(srcPath, self.inferLanguageId(srcPath)) + if not await self.openDocument(textDoc): + return + try: + for node in thisDoc.tokens: + # Do not waste time on this + if node.kind == SymbolKind.Namespace: + continue + # Put the cursor to the middle. 
+ line, col = node.pos.line, node.pos.character + node.length//2 + _logger.debug(node) + task = self._lspClient.server.textDocumentGotoDefinition(textDoc.uri, (line, col)) + nodeScope = thisDoc.scopeAt(line, col) + defs = await task + defNodes = [] + for d in defs: + d: Location + defPath = self.pathFromUri(d.uri) + if self._workspaceFilePatternsRegex: + if all(not p.match(str(defPath)) for p in self._workspaceFilePatternsRegex): + continue + defsDoc = await self.getTokenizedDocument(defPath) + defNode = defsDoc.tokenAt(d.range.start.line, d.range.start.character) + defScope = defsDoc.scopeAt(d.range.start.line, d.range.start.character) + if not defNode: + # Failed to retrieve a node from the given position. + _logger.warning("Failed to retrieve node from %s:%s.", defPath, d.range) + defNode = CallGraphNode(None, None, defPath, d.range.start, None) + if defNode == node: + # This node itself is a definition. Do not waste time on this. + defNodes = None + break + if defNode.kind == SymbolKind.Namespace: + # Find some namespace. Do not waste time on this. + defNodes = None + break + defNodes.append((defNode, defScope)) + if defNodes: + for dn, ds in defNodes: + counter += 1 + yield CallGraphBranch(nodeScope, ds, node, dn) + finally: + await self.closeDocument(textDoc.uri) + _logger.info("Yielded %d branches.", counter) + + async def modifyFile(self, fileName: str, newContent: str): + """ + Modify a file's content, notifying the language server, as if the file + is modified in the editor. 
+ """ + if newContent is None: + newContent = "" + path = Path(fileName).resolve() + try: + doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) + try: + self.removeDocumentCache(path) + self._lspClient.server.textDocumentDidOpen(doc) + self._lspClient.server.textDocumentDidChange(doc.uri, 2, [TextDocumentContentChangeEvent(newContent)]) + with open(fileName, "wt", encoding="utf-8") as f: + f.write(newContent) + self._lspClient.server.textDocumentDidSave(doc.uri) + _logger.info("Modified %s.", path) + return doc.text + finally: + await self.closeDocument(doc.uri) + except Exception as ex: + raise Exception("Cannot modify {0}.".format(path)) from ex diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py new file mode 100644 index 00000000000..598a2b33f7a --- /dev/null +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -0,0 +1,71 @@ +""" +Contains CallGraphManager. +""" +import logging +from pathlib import Path +from typing import Iterable, NamedTuple, Tuple, Union + +from . import CallGraph +from .builder import CallGraphBuilder + +_logger = logging.getLogger(__name__) + + +class CallGraphManager(): + """ + Used to construct / update call graph independently of specific implementations of + CallGraphBuilder. + """ + def __init__(self, builder: CallGraphBuilder, callGraph: CallGraph = None): + if not isinstance(builder, CallGraphBuilder): + raise TypeError("builderType should be a subtype of CallGraphBuilder.") + self._builder = builder + self._graph = callGraph or CallGraph() + #self.x = 0 + + @property + def graph(self): + """ + Gets the underlying CallGraph. + """ + return self._graph + + async def buildGraph(self, globPattern: Union[str, Iterable[str]] = None): + """ + Build call graph branches from the specified files. + + globPattern: `str` or `str[]` containing the glob pattern of the files + from which to build the call graph branches. 
+ """ + counter = 0 + async for branch in self._builder.buildCallGraphInFiles(globPattern): + try: + self._graph.add(branch) + counter += 1 + if counter % 1000 == 0: + _logger.info("Already added %d branches.", counter) + except ValueError as ex: + _logger.debug("%s Branch: %s", ex, branch) + _logger.info("Added %d branches.", counter) + + def removeByFiles(self, fileNames: Iterable[str]): + """ + Clear the graph nodes whose source or definition node contains the specified files. + """ + fileNames = set((Path(f).resolve() for f in fileNames)) + affectedFiles = set((i.sourceScope.file for i in self._graph.items if i.definitionScope.file in fileNames)) + affectedFiles.update(fileNames) + self._graph.removeBySourceFiles(affectedFiles) + + async def rebuildGraph(self, fileNames: Iterable[str]): + """ + Rebuild the source graph for the specified files. This operation will clear and rebuild the graph nodes + whose source or definition node contains the specified files. + """ + fileNames = set((Path(f).resolve() for f in fileNames)) + affectedFiles = set((i.sourceScope.file for i in self._graph.items if i.definitionScope.file in fileNames)) + affectedFiles.update(fileNames) + self._graph.removeBySourceFiles(affectedFiles) + #self.x += 1 + #self._graph.dumpTo("dmp" + str(self.x) + ".txt") + await self.buildGraph((str(p) for p in affectedFiles)) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py new file mode 100644 index 00000000000..8ebafa99d4a --- /dev/null +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -0,0 +1,123 @@ +""" +ccls client-side LSP support. 
+""" +import logging +from asyncio import sleep +from pathlib import PurePath + +from antlr4 import Token +from jsonrpc.endpoint import Endpoint +from jsonrpc.exceptions import JsonRpcException + +from callgraph.builder import CallGraphBuilder +from fileparsers.CPP14Lexer import CPP14Lexer +from languageclient.lspclient import LspClient +from languageclient.lspcontract import TextDocument +from languageclient.lspserver import LspServerStub + +_logger = logging.getLogger(__name__) + + +class CclsLspServerStub(LspServerStub): + def __init__(self, endpoint: Endpoint): + super().__init__(endpoint) + + async def getJobs(self): + """ + Gets the count of jobs to be done before server can provide latest call information. + """ + result = await self.request("$ccls/getJobs") + return int(result) + + +class CclsLspClient(LspClient): + def __init__(self, rx, tx, logFile: str = None): + super().__init__(rx, tx, logFile) + self._serverStub = CclsLspServerStub(self._endpoint) + + def m_ccls__publish_skipped_ranges(self, uri: str, skippedRanges: list): + pass + + def m_ccls__publish_semantic_highlight(self, uri: str, symbols: list): + pass + + +class CclsCallGraphBuilder(CallGraphBuilder): + # Do not F12 on operators. cquery tend to randomly jump to false-positives for non-overloaded operators. 
+ _tokensOfInterest = {CPP14Lexer.Identifier, + # CPP14Lexer.Plus, + # CPP14Lexer.Minus, + # CPP14Lexer.Star, + # CPP14Lexer.Div, + # CPP14Lexer.Mod, + # CPP14Lexer.Caret, + # CPP14Lexer.And, + # CPP14Lexer.Or, + # CPP14Lexer.Tilde, + # CPP14Lexer.Not, + # CPP14Lexer.Assign, + # CPP14Lexer.Less, + # CPP14Lexer.Greater, + # CPP14Lexer.PlusAssign, + # CPP14Lexer.MinusAssign, + # CPP14Lexer.StarAssign, + # CPP14Lexer.DivAssign, + # CPP14Lexer.ModAssign, + # CPP14Lexer.XorAssign, + # CPP14Lexer.AndAssign, + # CPP14Lexer.OrAssign, + # CPP14Lexer.LeftShift, + # CPP14Lexer.LeftShiftAssign, + # CPP14Lexer.Equal, + # CPP14Lexer.NotEqual, + # CPP14Lexer.LessEqual, + # CPP14Lexer.GreaterEqual, + # CPP14Lexer.AndAnd, + # CPP14Lexer.OrOr, + # CPP14Lexer.PlusPlus, + # CPP14Lexer.MinusMinus + } + + def __init__(self, lspClient: LspClient): + if not isinstance(lspClient, CclsLspClient): + raise TypeError("lspClient should be an instance of CclsLspClient.") + super().__init__(CPP14Lexer, lspClient) + + def filterToken(self, token: Token): + return token.type in self._tokensOfInterest + + def inferLanguageId(self, path: PurePath): + return "cpp" + + def modifyFile(self, fileName: str, newContent: str): + return super().modifyFile(fileName, newContent) + + async def openDocument(self, textDoc: TextDocument): + self._lspClient.server.textDocumentDidOpen(textDoc) + lastJobs = None + while True: + try: + while True: + curJobs = await self._lspClient.server.getJobs() + if curJobs != lastJobs: + _logger.debug("Server jobs: %d.", curJobs) + lastJobs = curJobs + if curJobs == 0: + break + if curJobs < 5: + await sleep(0.05) + elif curJobs < 50: + await sleep(0.1) + else: + await sleep(1) + # dummy request + await self._lspClient.server.textDocumentCodeLens(textDoc.uri) + return True + except JsonRpcException as ex: + if ex.code == -32002: + _logger.warning("Language server is not ready. 
Waiting…") + await sleep(5) + elif ex.code == -32603 and "unable to find" in ex.message: + _logger.warning("The file seems invalid. Server error: %s", ex.message) + return False + raise diff --git a/persper/analytics/lsp_graph_server/cquery.py b/persper/analytics/lsp_graph_server/cquery.py new file mode 100644 index 00000000000..e268a3d8e94 --- /dev/null +++ b/persper/analytics/lsp_graph_server/cquery.py @@ -0,0 +1,111 @@ +import logging +from asyncio import sleep +from pathlib import Path, PurePath + +from antlr4 import Token +from jsonrpc.endpoint import Endpoint +from jsonrpc.exceptions import JsonRpcException + +from callgraph import CallGraphBuilder +from fileparsers.CPP14Lexer import CPP14Lexer +from languageclient.lspclient import LspClient +from languageclient.lspcontract import TextDocument +from languageclient.lspserver import LspServerStub + +_logger = logging.getLogger(__name__) + + +class CQueryLspServerStub(LspServerStub): + def __init__(self, endpoint: Endpoint): + super().__init__(endpoint) + + def freshenIndex(self): + self.notify("$cquery/freshenIndex") + + def textDocumentDidView(self, documentUri: str): + self.notify("$cquery/textDocumentDidView", {"textDocumentUri": documentUri}) + + +class CQueryLspClient(LspClient): + def __init__(self, rx, tx): + super().__init__(rx, tx) + self._serverStub = CQueryLspServerStub(self._endpoint) + self._isBusy = False + + @property + def isBusy(self): + return self._isBusy + + def m_cquery__progress(self, indexRequestCount=0, doIdMapCount=0, loadPreviousIndexCount=0, onIdMappedCount=0, onIndexedCount=0, activeThreads=0): + # See https://github.com/cquery-project/vscode-cquery/blob/8ded1bd94548f9341bd9f1f1a636af01602012e0/src/extension.ts#L559 + total = indexRequestCount + doIdMapCount + loadPreviousIndexCount + onIdMappedCount + onIndexedCount + activeThreads + self._isBusy = total > 0 + _logger.log(logging.INFO if total > 0 else logging.DEBUG, "Req:%d IdMap:%d/%d/%d Threads:%d", + indexRequestCount, 
doIdMapCount, onIdMappedCount, onIndexedCount, activeThreads) + + +class CQueryCallGraphBuilder(CallGraphBuilder): + # Do not F12 on operators. cquery tend to randomly jump to false-positives for non-overloaded operators. + _tokensOfInterest = {CPP14Lexer.Identifier, + # CPP14Lexer.Plus, + # CPP14Lexer.Minus, + # CPP14Lexer.Star, + # CPP14Lexer.Div, + # CPP14Lexer.Mod, + # CPP14Lexer.Caret, + # CPP14Lexer.And, + # CPP14Lexer.Or, + # CPP14Lexer.Tilde, + # CPP14Lexer.Not, + # CPP14Lexer.Assign, + # CPP14Lexer.Less, + # CPP14Lexer.Greater, + # CPP14Lexer.PlusAssign, + # CPP14Lexer.MinusAssign, + # CPP14Lexer.StarAssign, + # CPP14Lexer.DivAssign, + # CPP14Lexer.ModAssign, + # CPP14Lexer.XorAssign, + # CPP14Lexer.AndAssign, + # CPP14Lexer.OrAssign, + # CPP14Lexer.LeftShift, + # CPP14Lexer.LeftShiftAssign, + # CPP14Lexer.Equal, + # CPP14Lexer.NotEqual, + # CPP14Lexer.LessEqual, + # CPP14Lexer.GreaterEqual, + # CPP14Lexer.AndAnd, + # CPP14Lexer.OrOr, + # CPP14Lexer.PlusPlus, + # CPP14Lexer.MinusMinus + } + + def __init__(self, lspClient: LspClient): + if not isinstance(lspClient, CQueryLspClient): + raise TypeError("lspClient should be an instance of CQueryLspClient.") + super().__init__(CPP14Lexer, lspClient) + + def filterToken(self, token: Token): + return token.type in self._tokensOfInterest + + def inferLanguageId(self, path: PurePath): + return "cpp" + + def modifyFile(self, fileName: str, newContent: str): + old = super().modifyFile(fileName, newContent) + self._lspClient.server.freshenIndex() + return old + + async def openDocument(self, textDoc: TextDocument): + self._lspClient.server.textDocumentDidOpen(textDoc) + while True: + try: + while self._lspClient.isBusy: + await sleep(1) + await self._lspClient.server.textDocumentCodeLens(textDoc.uri) + return + except JsonRpcException as ex: + # cquery specific + if ex.code == -32603 and "Unable to find file" in ex.message: + _logger.warning("Language server is not ready. 
Waiting…") + await sleep(5) diff --git a/persper/analytics/lsp_graph_server/fileparsers/CPP14Lexer.py b/persper/analytics/lsp_graph_server/fileparsers/CPP14Lexer.py new file mode 100644 index 00000000000..a85e5a022bd --- /dev/null +++ b/persper/analytics/lsp_graph_server/fileparsers/CPP14Lexer.py @@ -0,0 +1,964 @@ +# Generated from .\cpp\CPP14.g4 by ANTLR 4.7.1 +from antlr4 import * +from io import StringIO +from typing.io import TextIO +import sys + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\u0090") + buf.write("\u05a8\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") + buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") + buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") + buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") + buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") + buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") + buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") + buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") + buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") + buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") + buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t") + buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4") + buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4") + buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4") + buf.write("p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4") + buf.write("y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080") + buf.write("\t\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083") + buf.write("\4\u0084\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087") + buf.write("\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a") + 
buf.write("\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e") + buf.write("\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091") + buf.write("\4\u0092\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095") + buf.write("\t\u0095\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098") + buf.write("\4\u0099\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c") + buf.write("\t\u009c\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f") + buf.write("\4\u00a0\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3") + buf.write("\t\u00a3\4\u00a4\t\u00a4\4\u00a5\t\u00a5\4\u00a6\t\u00a6") + buf.write("\4\u00a7\t\u00a7\4\u00a8\t\u00a8\4\u00a9\t\u00a9\3\2\3") + buf.write("\2\7\2\u0156\n\2\f\2\16\2\u0159\13\2\3\2\3\2\5\2\u015d") + buf.write("\n\2\3\2\6\2\u0160\n\2\r\2\16\2\u0161\3\2\6\2\u0165\n") + buf.write("\2\r\2\16\2\u0166\3\2\3\2\3\3\3\3\7\3\u016d\n\3\f\3\16") + buf.write("\3\u0170\13\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4") + buf.write("\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3") + buf.write("\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t") + buf.write("\3\t\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\13") + buf.write("\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3") + buf.write("\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3") + buf.write("\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20") + buf.write("\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22") + buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23") + buf.write("\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24") + buf.write("\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25") + buf.write("\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27") + buf.write("\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31") + buf.write("\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31") + buf.write("\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34") + buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35") + 
buf.write("\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36") + buf.write("\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3!\3") + buf.write("!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3$") + buf.write("\3$\3$\3$\3$\3%\3%\3%\3&\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'") + buf.write("\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3") + buf.write("*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3") + buf.write(",\3,\3-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3.\3") + buf.write(".\3/\3/\3/\3/\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\60\3\60") + buf.write("\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61") + buf.write("\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63") + buf.write("\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64") + buf.write("\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64") + buf.write("\3\64\3\64\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66") + buf.write("\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\3\67\3\67") + buf.write("\38\38\38\38\38\38\38\39\39\39\39\39\39\39\3:\3:\3:\3") + buf.write(":\3:\3:\3:\3:\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3") + buf.write(";\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3") + buf.write("=\3>\3>\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3@\3@\3@\3") + buf.write("@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3B\3B\3") + buf.write("B\3B\3B\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3D\3E\3E\3E\3") + buf.write("E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3") + buf.write("G\3H\3H\3H\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3J\3J\3") + buf.write("J\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3L\3") + buf.write("L\3L\3M\3M\3M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3O\3O\3") + buf.write("P\3P\3Q\3Q\3R\3R\3S\3S\3T\3T\3U\3U\3V\3V\3W\3W\3X\3X\3") + buf.write("Y\3Y\3Z\3Z\3[\3[\3\\\3\\\3]\3]\3^\3^\3_\3_\3`\3`\3a\3") + buf.write("a\3b\3b\3b\3c\3c\3c\3d\3d\3d\3e\3e\3e\3f\3f\3f\3g\3g\3") + buf.write("g\3h\3h\3h\3i\3i\3i\3j\3j\3j\3k\3k\3k\3k\3l\3l\3l\3m\3") + 
buf.write("m\3m\3n\3n\3n\3o\3o\3o\3p\3p\3p\3q\3q\3q\3r\3r\3r\3s\3") + buf.write("s\3s\3t\3t\3u\3u\3u\3u\3v\3v\3v\3w\3w\3x\3x\3y\3y\3y\3") + buf.write("z\3z\3{\3{\3|\3|\3|\3}\3}\3}\3}\3~\3~\3~\3~\3~\3\177\3") + buf.write("\177\3\177\3\177\3\177\3\177\3\177\3\177\3\177\3\177\5") + buf.write("\177\u0421\n\177\3\u0080\3\u0080\3\u0080\7\u0080\u0426") + buf.write("\n\u0080\f\u0080\16\u0080\u0429\13\u0080\3\u0081\3\u0081") + buf.write("\5\u0081\u042d\n\u0081\3\u0082\3\u0082\3\u0083\3\u0083") + buf.write("\3\u0084\3\u0084\5\u0084\u0435\n\u0084\3\u0084\3\u0084") + buf.write("\5\u0084\u0439\n\u0084\3\u0084\3\u0084\5\u0084\u043d\n") + buf.write("\u0084\3\u0084\3\u0084\5\u0084\u0441\n\u0084\5\u0084\u0443") + buf.write("\n\u0084\3\u0085\3\u0085\5\u0085\u0447\n\u0085\3\u0085") + buf.write("\7\u0085\u044a\n\u0085\f\u0085\16\u0085\u044d\13\u0085") + buf.write("\3\u0086\3\u0086\5\u0086\u0451\n\u0086\3\u0086\7\u0086") + buf.write("\u0454\n\u0086\f\u0086\16\u0086\u0457\13\u0086\3\u0087") + buf.write("\3\u0087\3\u0087\3\u0087\5\u0087\u045d\n\u0087\3\u0087") + buf.write("\3\u0087\5\u0087\u0461\n\u0087\3\u0087\7\u0087\u0464\n") + buf.write("\u0087\f\u0087\16\u0087\u0467\13\u0087\3\u0088\3\u0088") + buf.write("\3\u0088\3\u0088\5\u0088\u046d\n\u0088\3\u0088\3\u0088") + buf.write("\5\u0088\u0471\n\u0088\3\u0088\7\u0088\u0474\n\u0088\f") + buf.write("\u0088\16\u0088\u0477\13\u0088\3\u0089\3\u0089\3\u008a") + buf.write("\3\u008a\3\u008b\3\u008b\3\u008c\3\u008c\3\u008d\3\u008d") + buf.write("\5\u008d\u0483\n\u008d\3\u008d\3\u008d\5\u008d\u0487\n") + buf.write("\u008d\3\u008d\3\u008d\5\u008d\u048b\n\u008d\3\u008d\3") + buf.write("\u008d\5\u008d\u048f\n\u008d\5\u008d\u0491\n\u008d\3\u008e") + buf.write("\3\u008e\3\u008f\3\u008f\3\u0090\3\u0090\3\u0090\3\u0090") + buf.write("\5\u0090\u049b\n\u0090\3\u0091\3\u0091\6\u0091\u049f\n") + buf.write("\u0091\r\u0091\16\u0091\u04a0\3\u0091\3\u0091\3\u0091") + buf.write("\3\u0091\3\u0091\6\u0091\u04a8\n\u0091\r\u0091\16\u0091") + 
buf.write("\u04a9\3\u0091\3\u0091\3\u0091\3\u0091\3\u0091\6\u0091") + buf.write("\u04b1\n\u0091\r\u0091\16\u0091\u04b2\3\u0091\3\u0091") + buf.write("\3\u0091\3\u0091\3\u0091\6\u0091\u04ba\n\u0091\r\u0091") + buf.write("\16\u0091\u04bb\3\u0091\3\u0091\5\u0091\u04c0\n\u0091") + buf.write("\3\u0092\3\u0092\3\u0092\5\u0092\u04c5\n\u0092\3\u0093") + buf.write("\3\u0093\3\u0093\5\u0093\u04ca\n\u0093\3\u0094\3\u0094") + buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094") + buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094") + buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\5\u0094") + buf.write("\u04e2\n\u0094\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095") + buf.write("\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\5\u0095") + buf.write("\u04ef\n\u0095\3\u0096\3\u0096\3\u0096\3\u0096\6\u0096") + buf.write("\u04f5\n\u0096\r\u0096\16\u0096\u04f6\3\u0097\3\u0097") + buf.write("\5\u0097\u04fb\n\u0097\3\u0097\5\u0097\u04fe\n\u0097\3") + buf.write("\u0097\3\u0097\3\u0097\5\u0097\u0503\n\u0097\5\u0097\u0505") + buf.write("\n\u0097\3\u0098\5\u0098\u0508\n\u0098\3\u0098\3\u0098") + buf.write("\3\u0098\3\u0098\3\u0098\5\u0098\u050f\n\u0098\3\u0099") + buf.write("\3\u0099\5\u0099\u0513\n\u0099\3\u0099\3\u0099\3\u0099") + buf.write("\5\u0099\u0518\n\u0099\3\u0099\5\u0099\u051b\n\u0099\3") + buf.write("\u009a\3\u009a\3\u009b\3\u009b\5\u009b\u0521\n\u009b\3") + buf.write("\u009b\7\u009b\u0524\n\u009b\f\u009b\16\u009b\u0527\13") + buf.write("\u009b\3\u009c\3\u009c\3\u009d\5\u009d\u052c\n\u009d\3") + buf.write("\u009d\3\u009d\7\u009d\u0530\n\u009d\f\u009d\16\u009d") + buf.write("\u0533\13\u009d\3\u009d\3\u009d\5\u009d\u0537\n\u009d") + buf.write("\3\u009d\3\u009d\5\u009d\u053b\n\u009d\3\u009e\3\u009e") + buf.write("\3\u009e\5\u009e\u0540\n\u009e\3\u009f\3\u009f\3\u009f") + buf.write("\5\u009f\u0545\n\u009f\3\u00a0\3\u00a0\7\u00a0\u0549\n") + buf.write("\u00a0\f\u00a0\16\u00a0\u054c\13\u00a0\3\u00a0\3\u00a0") + 
buf.write("\7\u00a0\u0550\n\u00a0\f\u00a0\16\u00a0\u0553\13\u00a0") + buf.write("\3\u00a0\3\u00a0\7\u00a0\u0557\n\u00a0\f\u00a0\16\u00a0") + buf.write("\u055a\13\u00a0\3\u00a0\3\u00a0\3\u00a1\3\u00a1\3\u00a1") + buf.write("\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1") + buf.write("\3\u00a1\3\u00a1\5\u00a1\u056a\n\u00a1\3\u00a2\3\u00a2") + buf.write("\5\u00a2\u056e\n\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2") + buf.write("\3\u00a2\3\u00a2\5\u00a2\u0576\n\u00a2\3\u00a3\3\u00a3") + buf.write("\3\u00a3\3\u00a4\3\u00a4\3\u00a4\3\u00a5\3\u00a5\3\u00a6") + buf.write("\6\u00a6\u0581\n\u00a6\r\u00a6\16\u00a6\u0582\3\u00a6") + buf.write("\3\u00a6\3\u00a7\3\u00a7\5\u00a7\u0589\n\u00a7\3\u00a7") + buf.write("\5\u00a7\u058c\n\u00a7\3\u00a7\3\u00a7\3\u00a8\3\u00a8") + buf.write("\3\u00a8\3\u00a8\7\u00a8\u0594\n\u00a8\f\u00a8\16\u00a8") + buf.write("\u0597\13\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8") + buf.write("\3\u00a9\3\u00a9\3\u00a9\3\u00a9\7\u00a9\u05a2\n\u00a9") + buf.write("\f\u00a9\16\u00a9\u05a5\13\u00a9\3\u00a9\3\u00a9\7\u0157") + buf.write("\u054a\u0551\u0558\u0595\2\u00aa\3\3\5\4\7\5\t\6\13\7") + buf.write("\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21") + buf.write("!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67") + buf.write("\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61") + buf.write("a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081") + buf.write("B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091") + buf.write("J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1") + buf.write("R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1") + buf.write("Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1") + buf.write("b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1") + buf.write("j\u00d3k\u00d5l\u00d7m\u00d9n\u00dbo\u00ddp\u00dfq\u00e1") + buf.write("r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1") + buf.write("z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\2\u00fd\2\u00ff\177") + 
buf.write("\u0101\2\u0103\2\u0105\2\u0107\u0080\u0109\u0081\u010b") + buf.write("\u0082\u010d\u0083\u010f\u0084\u0111\2\u0113\2\u0115\2") + buf.write("\u0117\2\u0119\u0085\u011b\2\u011d\2\u011f\2\u0121\u0086") + buf.write("\u0123\2\u0125\2\u0127\2\u0129\2\u012b\2\u012d\u0087\u012f") + buf.write("\2\u0131\2\u0133\2\u0135\2\u0137\2\u0139\u0088\u013b\2") + buf.write("\u013d\2\u013f\2\u0141\u0089\u0143\u008a\u0145\u008b\u0147") + buf.write("\u008c\u0149\2\u014b\u008d\u014d\u008e\u014f\u008f\u0151") + buf.write("\u0090\3\2\22\3\2\f\f\5\2C\\aac|\3\2\62;\3\2\63;\3\2\62") + buf.write("9\5\2\62;CHch\3\2\62\63\4\2WWww\4\2NNnn\6\2\f\f\17\17") + buf.write("))^^\4\2--//\6\2HHNNhhnn\5\2NNWWww\6\2\f\f\17\17$$^^\4") + buf.write("\2\13\13\"\"\4\2\f\f\17\17\2\u05e6\2\3\3\2\2\2\2\5\3\2") + buf.write("\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2") + buf.write("\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2") + buf.write("\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37") + buf.write("\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2") + buf.write("\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2") + buf.write("\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2") + buf.write("\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2") + buf.write("\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2") + buf.write("\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3") + buf.write("\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a") + buf.write("\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2") + buf.write("k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2") + buf.write("\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2") + buf.write("\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2\2\u0085") + buf.write("\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2") + buf.write("\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093") + buf.write("\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2") + 
buf.write("\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f\3\2\2\2\2\u00a1") + buf.write("\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7\3\2\2") + buf.write("\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af") + buf.write("\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2") + buf.write("\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd") + buf.write("\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2") + buf.write("\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb") + buf.write("\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2") + buf.write("\2\2\u00d3\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9") + buf.write("\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2") + buf.write("\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2\2\2\u00e7") + buf.write("\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2") + buf.write("\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5") + buf.write("\3\2\2\2\2\u00f7\3\2\2\2\2\u00f9\3\2\2\2\2\u00ff\3\2\2") + buf.write("\2\2\u0107\3\2\2\2\2\u0109\3\2\2\2\2\u010b\3\2\2\2\2\u010d") + buf.write("\3\2\2\2\2\u010f\3\2\2\2\2\u0119\3\2\2\2\2\u0121\3\2\2") + buf.write("\2\2\u012d\3\2\2\2\2\u0139\3\2\2\2\2\u0141\3\2\2\2\2\u0143") + buf.write("\3\2\2\2\2\u0145\3\2\2\2\2\u0147\3\2\2\2\2\u014b\3\2\2") + buf.write("\2\2\u014d\3\2\2\2\2\u014f\3\2\2\2\2\u0151\3\2\2\2\3\u0153") + buf.write("\3\2\2\2\5\u016a\3\2\2\2\7\u0173\3\2\2\2\t\u017b\3\2\2") + buf.write("\2\13\u0183\3\2\2\2\r\u0187\3\2\2\2\17\u018c\3\2\2\2\21") + buf.write("\u0191\3\2\2\2\23\u0197\3\2\2\2\25\u019c\3\2\2\2\27\u01a2") + buf.write("\3\2\2\2\31\u01a7\3\2\2\2\33\u01b0\3\2\2\2\35\u01b9\3") + buf.write("\2\2\2\37\u01bf\3\2\2\2!\u01c5\3\2\2\2#\u01cf\3\2\2\2") + buf.write("%\u01da\3\2\2\2\'\u01e3\3\2\2\2)\u01ec\3\2\2\2+\u01f4") + buf.write("\3\2\2\2-\u01fb\3\2\2\2/\u01fe\3\2\2\2\61\u0205\3\2\2") + buf.write("\2\63\u0212\3\2\2\2\65\u0217\3\2\2\2\67\u021c\3\2\2\2") + buf.write("9\u0225\3\2\2\2;\u022c\3\2\2\2=\u0233\3\2\2\2?\u0239\3") + 
buf.write("\2\2\2A\u023f\3\2\2\2C\u0245\3\2\2\2E\u0249\3\2\2\2G\u0250") + buf.write("\3\2\2\2I\u0255\3\2\2\2K\u0258\3\2\2\2M\u025f\3\2\2\2") + buf.write("O\u0263\3\2\2\2Q\u0268\3\2\2\2S\u0270\3\2\2\2U\u027a\3") + buf.write("\2\2\2W\u027e\3\2\2\2Y\u0287\3\2\2\2[\u028f\3\2\2\2]\u0298") + buf.write("\3\2\2\2_\u02a1\3\2\2\2a\u02a9\3\2\2\2c\u02b3\3\2\2\2") + buf.write("e\u02ba\3\2\2\2g\u02c3\3\2\2\2i\u02d4\3\2\2\2k\u02db\3") + buf.write("\2\2\2m\u02e1\3\2\2\2o\u02e8\3\2\2\2q\u02ef\3\2\2\2s\u02f6") + buf.write("\3\2\2\2u\u0304\3\2\2\2w\u0310\3\2\2\2y\u0317\3\2\2\2") + buf.write("{\u031e\3\2\2\2}\u0327\3\2\2\2\177\u032c\3\2\2\2\u0081") + buf.write("\u0339\3\2\2\2\u0083\u033f\3\2\2\2\u0085\u0344\3\2\2\2") + buf.write("\u0087\u0348\3\2\2\2\u0089\u0350\3\2\2\2\u008b\u0357\3") + buf.write("\2\2\2\u008d\u0360\3\2\2\2\u008f\u0366\3\2\2\2\u0091\u036f") + buf.write("\3\2\2\2\u0093\u0375\3\2\2\2\u0095\u037d\3\2\2\2\u0097") + buf.write("\u0382\3\2\2\2\u0099\u038b\3\2\2\2\u009b\u0393\3\2\2\2") + buf.write("\u009d\u0399\3\2\2\2\u009f\u039b\3\2\2\2\u00a1\u039d\3") + buf.write("\2\2\2\u00a3\u039f\3\2\2\2\u00a5\u03a1\3\2\2\2\u00a7\u03a3") + buf.write("\3\2\2\2\u00a9\u03a5\3\2\2\2\u00ab\u03a7\3\2\2\2\u00ad") + buf.write("\u03a9\3\2\2\2\u00af\u03ab\3\2\2\2\u00b1\u03ad\3\2\2\2") + buf.write("\u00b3\u03af\3\2\2\2\u00b5\u03b1\3\2\2\2\u00b7\u03b3\3") + buf.write("\2\2\2\u00b9\u03b5\3\2\2\2\u00bb\u03b7\3\2\2\2\u00bd\u03b9") + buf.write("\3\2\2\2\u00bf\u03bb\3\2\2\2\u00c1\u03bd\3\2\2\2\u00c3") + buf.write("\u03bf\3\2\2\2\u00c5\u03c2\3\2\2\2\u00c7\u03c5\3\2\2\2") + buf.write("\u00c9\u03c8\3\2\2\2\u00cb\u03cb\3\2\2\2\u00cd\u03ce\3") + buf.write("\2\2\2\u00cf\u03d1\3\2\2\2\u00d1\u03d4\3\2\2\2\u00d3\u03d7") + buf.write("\3\2\2\2\u00d5\u03da\3\2\2\2\u00d7\u03de\3\2\2\2\u00d9") + buf.write("\u03e1\3\2\2\2\u00db\u03e4\3\2\2\2\u00dd\u03e7\3\2\2\2") + buf.write("\u00df\u03ea\3\2\2\2\u00e1\u03ed\3\2\2\2\u00e3\u03f0\3") + buf.write("\2\2\2\u00e5\u03f3\3\2\2\2\u00e7\u03f6\3\2\2\2\u00e9\u03f8") + 
buf.write("\3\2\2\2\u00eb\u03fc\3\2\2\2\u00ed\u03ff\3\2\2\2\u00ef") + buf.write("\u0401\3\2\2\2\u00f1\u0403\3\2\2\2\u00f3\u0406\3\2\2\2") + buf.write("\u00f5\u0408\3\2\2\2\u00f7\u040a\3\2\2\2\u00f9\u040d\3") + buf.write("\2\2\2\u00fb\u0411\3\2\2\2\u00fd\u0420\3\2\2\2\u00ff\u0422") + buf.write("\3\2\2\2\u0101\u042c\3\2\2\2\u0103\u042e\3\2\2\2\u0105") + buf.write("\u0430\3\2\2\2\u0107\u0442\3\2\2\2\u0109\u0444\3\2\2\2") + buf.write("\u010b\u044e\3\2\2\2\u010d\u045c\3\2\2\2\u010f\u046c\3") + buf.write("\2\2\2\u0111\u0478\3\2\2\2\u0113\u047a\3\2\2\2\u0115\u047c") + buf.write("\3\2\2\2\u0117\u047e\3\2\2\2\u0119\u0490\3\2\2\2\u011b") + buf.write("\u0492\3\2\2\2\u011d\u0494\3\2\2\2\u011f\u049a\3\2\2\2") + buf.write("\u0121\u04bf\3\2\2\2\u0123\u04c4\3\2\2\2\u0125\u04c9\3") + buf.write("\2\2\2\u0127\u04e1\3\2\2\2\u0129\u04ee\3\2\2\2\u012b\u04f0") + buf.write("\3\2\2\2\u012d\u0504\3\2\2\2\u012f\u050e\3\2\2\2\u0131") + buf.write("\u051a\3\2\2\2\u0133\u051c\3\2\2\2\u0135\u051e\3\2\2\2") + buf.write("\u0137\u0528\3\2\2\2\u0139\u053a\3\2\2\2\u013b\u053f\3") + buf.write("\2\2\2\u013d\u0544\3\2\2\2\u013f\u0546\3\2\2\2\u0141\u0569") + buf.write("\3\2\2\2\u0143\u0575\3\2\2\2\u0145\u0577\3\2\2\2\u0147") + buf.write("\u057a\3\2\2\2\u0149\u057d\3\2\2\2\u014b\u0580\3\2\2\2") + buf.write("\u014d\u058b\3\2\2\2\u014f\u058f\3\2\2\2\u0151\u059d\3") + buf.write("\2\2\2\u0153\u015f\7%\2\2\u0154\u0156\n\2\2\2\u0155\u0154") + buf.write("\3\2\2\2\u0156\u0159\3\2\2\2\u0157\u0158\3\2\2\2\u0157") + buf.write("\u0155\3\2\2\2\u0158\u015a\3\2\2\2\u0159\u0157\3\2\2\2") + buf.write("\u015a\u015c\7^\2\2\u015b\u015d\7\17\2\2\u015c\u015b\3") + buf.write("\2\2\2\u015c\u015d\3\2\2\2\u015d\u015e\3\2\2\2\u015e\u0160") + buf.write("\7\f\2\2\u015f\u0157\3\2\2\2\u0160\u0161\3\2\2\2\u0161") + buf.write("\u015f\3\2\2\2\u0161\u0162\3\2\2\2\u0162\u0164\3\2\2\2") + buf.write("\u0163\u0165\n\2\2\2\u0164\u0163\3\2\2\2\u0165\u0166\3") + buf.write("\2\2\2\u0166\u0164\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0168") + 
buf.write("\3\2\2\2\u0168\u0169\b\2\2\2\u0169\4\3\2\2\2\u016a\u016e") + buf.write("\7%\2\2\u016b\u016d\n\2\2\2\u016c\u016b\3\2\2\2\u016d") + buf.write("\u0170\3\2\2\2\u016e\u016c\3\2\2\2\u016e\u016f\3\2\2\2") + buf.write("\u016f\u0171\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u0172\b") + buf.write("\3\2\2\u0172\6\3\2\2\2\u0173\u0174\7c\2\2\u0174\u0175") + buf.write("\7n\2\2\u0175\u0176\7k\2\2\u0176\u0177\7i\2\2\u0177\u0178") + buf.write("\7p\2\2\u0178\u0179\7c\2\2\u0179\u017a\7u\2\2\u017a\b") + buf.write("\3\2\2\2\u017b\u017c\7c\2\2\u017c\u017d\7n\2\2\u017d\u017e") + buf.write("\7k\2\2\u017e\u017f\7i\2\2\u017f\u0180\7p\2\2\u0180\u0181") + buf.write("\7q\2\2\u0181\u0182\7h\2\2\u0182\n\3\2\2\2\u0183\u0184") + buf.write("\7c\2\2\u0184\u0185\7u\2\2\u0185\u0186\7o\2\2\u0186\f") + buf.write("\3\2\2\2\u0187\u0188\7c\2\2\u0188\u0189\7w\2\2\u0189\u018a") + buf.write("\7v\2\2\u018a\u018b\7q\2\2\u018b\16\3\2\2\2\u018c\u018d") + buf.write("\7d\2\2\u018d\u018e\7q\2\2\u018e\u018f\7q\2\2\u018f\u0190") + buf.write("\7n\2\2\u0190\20\3\2\2\2\u0191\u0192\7d\2\2\u0192\u0193") + buf.write("\7t\2\2\u0193\u0194\7g\2\2\u0194\u0195\7c\2\2\u0195\u0196") + buf.write("\7m\2\2\u0196\22\3\2\2\2\u0197\u0198\7e\2\2\u0198\u0199") + buf.write("\7c\2\2\u0199\u019a\7u\2\2\u019a\u019b\7g\2\2\u019b\24") + buf.write("\3\2\2\2\u019c\u019d\7e\2\2\u019d\u019e\7c\2\2\u019e\u019f") + buf.write("\7v\2\2\u019f\u01a0\7e\2\2\u01a0\u01a1\7j\2\2\u01a1\26") + buf.write("\3\2\2\2\u01a2\u01a3\7e\2\2\u01a3\u01a4\7j\2\2\u01a4\u01a5") + buf.write("\7c\2\2\u01a5\u01a6\7t\2\2\u01a6\30\3\2\2\2\u01a7\u01a8") + buf.write("\7e\2\2\u01a8\u01a9\7j\2\2\u01a9\u01aa\7c\2\2\u01aa\u01ab") + buf.write("\7t\2\2\u01ab\u01ac\7\63\2\2\u01ac\u01ad\78\2\2\u01ad") + buf.write("\u01ae\7a\2\2\u01ae\u01af\7v\2\2\u01af\32\3\2\2\2\u01b0") + buf.write("\u01b1\7e\2\2\u01b1\u01b2\7j\2\2\u01b2\u01b3\7c\2\2\u01b3") + buf.write("\u01b4\7t\2\2\u01b4\u01b5\7\65\2\2\u01b5\u01b6\7\64\2") + buf.write("\2\u01b6\u01b7\7a\2\2\u01b7\u01b8\7v\2\2\u01b8\34\3\2") + 
buf.write("\2\2\u01b9\u01ba\7e\2\2\u01ba\u01bb\7n\2\2\u01bb\u01bc") + buf.write("\7c\2\2\u01bc\u01bd\7u\2\2\u01bd\u01be\7u\2\2\u01be\36") + buf.write("\3\2\2\2\u01bf\u01c0\7e\2\2\u01c0\u01c1\7q\2\2\u01c1\u01c2") + buf.write("\7p\2\2\u01c2\u01c3\7u\2\2\u01c3\u01c4\7v\2\2\u01c4 \3") + buf.write("\2\2\2\u01c5\u01c6\7e\2\2\u01c6\u01c7\7q\2\2\u01c7\u01c8") + buf.write("\7p\2\2\u01c8\u01c9\7u\2\2\u01c9\u01ca\7v\2\2\u01ca\u01cb") + buf.write("\7g\2\2\u01cb\u01cc\7z\2\2\u01cc\u01cd\7r\2\2\u01cd\u01ce") + buf.write("\7t\2\2\u01ce\"\3\2\2\2\u01cf\u01d0\7e\2\2\u01d0\u01d1") + buf.write("\7q\2\2\u01d1\u01d2\7p\2\2\u01d2\u01d3\7u\2\2\u01d3\u01d4") + buf.write("\7v\2\2\u01d4\u01d5\7a\2\2\u01d5\u01d6\7e\2\2\u01d6\u01d7") + buf.write("\7c\2\2\u01d7\u01d8\7u\2\2\u01d8\u01d9\7v\2\2\u01d9$\3") + buf.write("\2\2\2\u01da\u01db\7e\2\2\u01db\u01dc\7q\2\2\u01dc\u01dd") + buf.write("\7p\2\2\u01dd\u01de\7v\2\2\u01de\u01df\7k\2\2\u01df\u01e0") + buf.write("\7p\2\2\u01e0\u01e1\7w\2\2\u01e1\u01e2\7g\2\2\u01e2&\3") + buf.write("\2\2\2\u01e3\u01e4\7f\2\2\u01e4\u01e5\7g\2\2\u01e5\u01e6") + buf.write("\7e\2\2\u01e6\u01e7\7n\2\2\u01e7\u01e8\7v\2\2\u01e8\u01e9") + buf.write("\7{\2\2\u01e9\u01ea\7r\2\2\u01ea\u01eb\7g\2\2\u01eb(\3") + buf.write("\2\2\2\u01ec\u01ed\7f\2\2\u01ed\u01ee\7g\2\2\u01ee\u01ef") + buf.write("\7h\2\2\u01ef\u01f0\7c\2\2\u01f0\u01f1\7w\2\2\u01f1\u01f2") + buf.write("\7n\2\2\u01f2\u01f3\7v\2\2\u01f3*\3\2\2\2\u01f4\u01f5") + buf.write("\7f\2\2\u01f5\u01f6\7g\2\2\u01f6\u01f7\7n\2\2\u01f7\u01f8") + buf.write("\7g\2\2\u01f8\u01f9\7v\2\2\u01f9\u01fa\7g\2\2\u01fa,\3") + buf.write("\2\2\2\u01fb\u01fc\7f\2\2\u01fc\u01fd\7q\2\2\u01fd.\3") + buf.write("\2\2\2\u01fe\u01ff\7f\2\2\u01ff\u0200\7q\2\2\u0200\u0201") + buf.write("\7w\2\2\u0201\u0202\7d\2\2\u0202\u0203\7n\2\2\u0203\u0204") + buf.write("\7g\2\2\u0204\60\3\2\2\2\u0205\u0206\7f\2\2\u0206\u0207") + buf.write("\7{\2\2\u0207\u0208\7p\2\2\u0208\u0209\7c\2\2\u0209\u020a") + 
buf.write("\7o\2\2\u020a\u020b\7k\2\2\u020b\u020c\7e\2\2\u020c\u020d") + buf.write("\7a\2\2\u020d\u020e\7e\2\2\u020e\u020f\7c\2\2\u020f\u0210") + buf.write("\7u\2\2\u0210\u0211\7v\2\2\u0211\62\3\2\2\2\u0212\u0213") + buf.write("\7g\2\2\u0213\u0214\7n\2\2\u0214\u0215\7u\2\2\u0215\u0216") + buf.write("\7g\2\2\u0216\64\3\2\2\2\u0217\u0218\7g\2\2\u0218\u0219") + buf.write("\7p\2\2\u0219\u021a\7w\2\2\u021a\u021b\7o\2\2\u021b\66") + buf.write("\3\2\2\2\u021c\u021d\7g\2\2\u021d\u021e\7z\2\2\u021e\u021f") + buf.write("\7r\2\2\u021f\u0220\7n\2\2\u0220\u0221\7k\2\2\u0221\u0222") + buf.write("\7e\2\2\u0222\u0223\7k\2\2\u0223\u0224\7v\2\2\u02248\3") + buf.write("\2\2\2\u0225\u0226\7g\2\2\u0226\u0227\7z\2\2\u0227\u0228") + buf.write("\7r\2\2\u0228\u0229\7q\2\2\u0229\u022a\7t\2\2\u022a\u022b") + buf.write("\7v\2\2\u022b:\3\2\2\2\u022c\u022d\7g\2\2\u022d\u022e") + buf.write("\7z\2\2\u022e\u022f\7v\2\2\u022f\u0230\7g\2\2\u0230\u0231") + buf.write("\7t\2\2\u0231\u0232\7p\2\2\u0232<\3\2\2\2\u0233\u0234") + buf.write("\7h\2\2\u0234\u0235\7c\2\2\u0235\u0236\7n\2\2\u0236\u0237") + buf.write("\7u\2\2\u0237\u0238\7g\2\2\u0238>\3\2\2\2\u0239\u023a") + buf.write("\7h\2\2\u023a\u023b\7k\2\2\u023b\u023c\7p\2\2\u023c\u023d") + buf.write("\7c\2\2\u023d\u023e\7n\2\2\u023e@\3\2\2\2\u023f\u0240") + buf.write("\7h\2\2\u0240\u0241\7n\2\2\u0241\u0242\7q\2\2\u0242\u0243") + buf.write("\7c\2\2\u0243\u0244\7v\2\2\u0244B\3\2\2\2\u0245\u0246") + buf.write("\7h\2\2\u0246\u0247\7q\2\2\u0247\u0248\7t\2\2\u0248D\3") + buf.write("\2\2\2\u0249\u024a\7h\2\2\u024a\u024b\7t\2\2\u024b\u024c") + buf.write("\7k\2\2\u024c\u024d\7g\2\2\u024d\u024e\7p\2\2\u024e\u024f") + buf.write("\7f\2\2\u024fF\3\2\2\2\u0250\u0251\7i\2\2\u0251\u0252") + buf.write("\7q\2\2\u0252\u0253\7v\2\2\u0253\u0254\7q\2\2\u0254H\3") + buf.write("\2\2\2\u0255\u0256\7k\2\2\u0256\u0257\7h\2\2\u0257J\3") + buf.write("\2\2\2\u0258\u0259\7k\2\2\u0259\u025a\7p\2\2\u025a\u025b") + buf.write("\7n\2\2\u025b\u025c\7k\2\2\u025c\u025d\7p\2\2\u025d\u025e") + 
buf.write("\7g\2\2\u025eL\3\2\2\2\u025f\u0260\7k\2\2\u0260\u0261") + buf.write("\7p\2\2\u0261\u0262\7v\2\2\u0262N\3\2\2\2\u0263\u0264") + buf.write("\7n\2\2\u0264\u0265\7q\2\2\u0265\u0266\7p\2\2\u0266\u0267") + buf.write("\7i\2\2\u0267P\3\2\2\2\u0268\u0269\7o\2\2\u0269\u026a") + buf.write("\7w\2\2\u026a\u026b\7v\2\2\u026b\u026c\7c\2\2\u026c\u026d") + buf.write("\7d\2\2\u026d\u026e\7n\2\2\u026e\u026f\7g\2\2\u026fR\3") + buf.write("\2\2\2\u0270\u0271\7p\2\2\u0271\u0272\7c\2\2\u0272\u0273") + buf.write("\7o\2\2\u0273\u0274\7g\2\2\u0274\u0275\7u\2\2\u0275\u0276") + buf.write("\7r\2\2\u0276\u0277\7c\2\2\u0277\u0278\7e\2\2\u0278\u0279") + buf.write("\7g\2\2\u0279T\3\2\2\2\u027a\u027b\7p\2\2\u027b\u027c") + buf.write("\7g\2\2\u027c\u027d\7y\2\2\u027dV\3\2\2\2\u027e\u027f") + buf.write("\7p\2\2\u027f\u0280\7q\2\2\u0280\u0281\7g\2\2\u0281\u0282") + buf.write("\7z\2\2\u0282\u0283\7e\2\2\u0283\u0284\7g\2\2\u0284\u0285") + buf.write("\7r\2\2\u0285\u0286\7v\2\2\u0286X\3\2\2\2\u0287\u0288") + buf.write("\7p\2\2\u0288\u0289\7w\2\2\u0289\u028a\7n\2\2\u028a\u028b") + buf.write("\7n\2\2\u028b\u028c\7r\2\2\u028c\u028d\7v\2\2\u028d\u028e") + buf.write("\7t\2\2\u028eZ\3\2\2\2\u028f\u0290\7q\2\2\u0290\u0291") + buf.write("\7r\2\2\u0291\u0292\7g\2\2\u0292\u0293\7t\2\2\u0293\u0294") + buf.write("\7c\2\2\u0294\u0295\7v\2\2\u0295\u0296\7q\2\2\u0296\u0297") + buf.write("\7t\2\2\u0297\\\3\2\2\2\u0298\u0299\7q\2\2\u0299\u029a") + buf.write("\7x\2\2\u029a\u029b\7g\2\2\u029b\u029c\7t\2\2\u029c\u029d") + buf.write("\7t\2\2\u029d\u029e\7k\2\2\u029e\u029f\7f\2\2\u029f\u02a0") + buf.write("\7g\2\2\u02a0^\3\2\2\2\u02a1\u02a2\7r\2\2\u02a2\u02a3") + buf.write("\7t\2\2\u02a3\u02a4\7k\2\2\u02a4\u02a5\7x\2\2\u02a5\u02a6") + buf.write("\7c\2\2\u02a6\u02a7\7v\2\2\u02a7\u02a8\7g\2\2\u02a8`\3") + buf.write("\2\2\2\u02a9\u02aa\7r\2\2\u02aa\u02ab\7t\2\2\u02ab\u02ac") + buf.write("\7q\2\2\u02ac\u02ad\7v\2\2\u02ad\u02ae\7g\2\2\u02ae\u02af") + buf.write("\7e\2\2\u02af\u02b0\7v\2\2\u02b0\u02b1\7g\2\2\u02b1\u02b2") 
+ buf.write("\7f\2\2\u02b2b\3\2\2\2\u02b3\u02b4\7r\2\2\u02b4\u02b5") + buf.write("\7w\2\2\u02b5\u02b6\7d\2\2\u02b6\u02b7\7n\2\2\u02b7\u02b8") + buf.write("\7k\2\2\u02b8\u02b9\7e\2\2\u02b9d\3\2\2\2\u02ba\u02bb") + buf.write("\7t\2\2\u02bb\u02bc\7g\2\2\u02bc\u02bd\7i\2\2\u02bd\u02be") + buf.write("\7k\2\2\u02be\u02bf\7u\2\2\u02bf\u02c0\7v\2\2\u02c0\u02c1") + buf.write("\7g\2\2\u02c1\u02c2\7t\2\2\u02c2f\3\2\2\2\u02c3\u02c4") + buf.write("\7t\2\2\u02c4\u02c5\7g\2\2\u02c5\u02c6\7k\2\2\u02c6\u02c7") + buf.write("\7p\2\2\u02c7\u02c8\7v\2\2\u02c8\u02c9\7g\2\2\u02c9\u02ca") + buf.write("\7t\2\2\u02ca\u02cb\7r\2\2\u02cb\u02cc\7t\2\2\u02cc\u02cd") + buf.write("\7g\2\2\u02cd\u02ce\7v\2\2\u02ce\u02cf\7a\2\2\u02cf\u02d0") + buf.write("\7e\2\2\u02d0\u02d1\7c\2\2\u02d1\u02d2\7u\2\2\u02d2\u02d3") + buf.write("\7v\2\2\u02d3h\3\2\2\2\u02d4\u02d5\7t\2\2\u02d5\u02d6") + buf.write("\7g\2\2\u02d6\u02d7\7v\2\2\u02d7\u02d8\7w\2\2\u02d8\u02d9") + buf.write("\7t\2\2\u02d9\u02da\7p\2\2\u02daj\3\2\2\2\u02db\u02dc") + buf.write("\7u\2\2\u02dc\u02dd\7j\2\2\u02dd\u02de\7q\2\2\u02de\u02df") + buf.write("\7t\2\2\u02df\u02e0\7v\2\2\u02e0l\3\2\2\2\u02e1\u02e2") + buf.write("\7u\2\2\u02e2\u02e3\7k\2\2\u02e3\u02e4\7i\2\2\u02e4\u02e5") + buf.write("\7p\2\2\u02e5\u02e6\7g\2\2\u02e6\u02e7\7f\2\2\u02e7n\3") + buf.write("\2\2\2\u02e8\u02e9\7u\2\2\u02e9\u02ea\7k\2\2\u02ea\u02eb") + buf.write("\7|\2\2\u02eb\u02ec\7g\2\2\u02ec\u02ed\7q\2\2\u02ed\u02ee") + buf.write("\7h\2\2\u02eep\3\2\2\2\u02ef\u02f0\7u\2\2\u02f0\u02f1") + buf.write("\7v\2\2\u02f1\u02f2\7c\2\2\u02f2\u02f3\7v\2\2\u02f3\u02f4") + buf.write("\7k\2\2\u02f4\u02f5\7e\2\2\u02f5r\3\2\2\2\u02f6\u02f7") + buf.write("\7u\2\2\u02f7\u02f8\7v\2\2\u02f8\u02f9\7c\2\2\u02f9\u02fa") + buf.write("\7v\2\2\u02fa\u02fb\7k\2\2\u02fb\u02fc\7e\2\2\u02fc\u02fd") + buf.write("\7a\2\2\u02fd\u02fe\7c\2\2\u02fe\u02ff\7u\2\2\u02ff\u0300") + buf.write("\7u\2\2\u0300\u0301\7g\2\2\u0301\u0302\7t\2\2\u0302\u0303") + 
buf.write("\7v\2\2\u0303t\3\2\2\2\u0304\u0305\7u\2\2\u0305\u0306") + buf.write("\7v\2\2\u0306\u0307\7c\2\2\u0307\u0308\7v\2\2\u0308\u0309") + buf.write("\7k\2\2\u0309\u030a\7e\2\2\u030a\u030b\7a\2\2\u030b\u030c") + buf.write("\7e\2\2\u030c\u030d\7c\2\2\u030d\u030e\7u\2\2\u030e\u030f") + buf.write("\7v\2\2\u030fv\3\2\2\2\u0310\u0311\7u\2\2\u0311\u0312") + buf.write("\7v\2\2\u0312\u0313\7t\2\2\u0313\u0314\7w\2\2\u0314\u0315") + buf.write("\7e\2\2\u0315\u0316\7v\2\2\u0316x\3\2\2\2\u0317\u0318") + buf.write("\7u\2\2\u0318\u0319\7y\2\2\u0319\u031a\7k\2\2\u031a\u031b") + buf.write("\7v\2\2\u031b\u031c\7e\2\2\u031c\u031d\7j\2\2\u031dz\3") + buf.write("\2\2\2\u031e\u031f\7v\2\2\u031f\u0320\7g\2\2\u0320\u0321") + buf.write("\7o\2\2\u0321\u0322\7r\2\2\u0322\u0323\7n\2\2\u0323\u0324") + buf.write("\7c\2\2\u0324\u0325\7v\2\2\u0325\u0326\7g\2\2\u0326|\3") + buf.write("\2\2\2\u0327\u0328\7v\2\2\u0328\u0329\7j\2\2\u0329\u032a") + buf.write("\7k\2\2\u032a\u032b\7u\2\2\u032b~\3\2\2\2\u032c\u032d") + buf.write("\7v\2\2\u032d\u032e\7j\2\2\u032e\u032f\7t\2\2\u032f\u0330") + buf.write("\7g\2\2\u0330\u0331\7c\2\2\u0331\u0332\7f\2\2\u0332\u0333") + buf.write("\7a\2\2\u0333\u0334\7n\2\2\u0334\u0335\7q\2\2\u0335\u0336") + buf.write("\7e\2\2\u0336\u0337\7c\2\2\u0337\u0338\7n\2\2\u0338\u0080") + buf.write("\3\2\2\2\u0339\u033a\7v\2\2\u033a\u033b\7j\2\2\u033b\u033c") + buf.write("\7t\2\2\u033c\u033d\7q\2\2\u033d\u033e\7y\2\2\u033e\u0082") + buf.write("\3\2\2\2\u033f\u0340\7v\2\2\u0340\u0341\7t\2\2\u0341\u0342") + buf.write("\7w\2\2\u0342\u0343\7g\2\2\u0343\u0084\3\2\2\2\u0344\u0345") + buf.write("\7v\2\2\u0345\u0346\7t\2\2\u0346\u0347\7{\2\2\u0347\u0086") + buf.write("\3\2\2\2\u0348\u0349\7v\2\2\u0349\u034a\7{\2\2\u034a\u034b") + buf.write("\7r\2\2\u034b\u034c\7g\2\2\u034c\u034d\7f\2\2\u034d\u034e") + buf.write("\7g\2\2\u034e\u034f\7h\2\2\u034f\u0088\3\2\2\2\u0350\u0351") + buf.write("\7v\2\2\u0351\u0352\7{\2\2\u0352\u0353\7r\2\2\u0353\u0354") + 
buf.write("\7g\2\2\u0354\u0355\7k\2\2\u0355\u0356\7f\2\2\u0356\u008a") + buf.write("\3\2\2\2\u0357\u0358\7v\2\2\u0358\u0359\7{\2\2\u0359\u035a") + buf.write("\7r\2\2\u035a\u035b\7g\2\2\u035b\u035c\7p\2\2\u035c\u035d") + buf.write("\7c\2\2\u035d\u035e\7o\2\2\u035e\u035f\7g\2\2\u035f\u008c") + buf.write("\3\2\2\2\u0360\u0361\7w\2\2\u0361\u0362\7p\2\2\u0362\u0363") + buf.write("\7k\2\2\u0363\u0364\7q\2\2\u0364\u0365\7p\2\2\u0365\u008e") + buf.write("\3\2\2\2\u0366\u0367\7w\2\2\u0367\u0368\7p\2\2\u0368\u0369") + buf.write("\7u\2\2\u0369\u036a\7k\2\2\u036a\u036b\7i\2\2\u036b\u036c") + buf.write("\7p\2\2\u036c\u036d\7g\2\2\u036d\u036e\7f\2\2\u036e\u0090") + buf.write("\3\2\2\2\u036f\u0370\7w\2\2\u0370\u0371\7u\2\2\u0371\u0372") + buf.write("\7k\2\2\u0372\u0373\7p\2\2\u0373\u0374\7i\2\2\u0374\u0092") + buf.write("\3\2\2\2\u0375\u0376\7x\2\2\u0376\u0377\7k\2\2\u0377\u0378") + buf.write("\7t\2\2\u0378\u0379\7v\2\2\u0379\u037a\7w\2\2\u037a\u037b") + buf.write("\7c\2\2\u037b\u037c\7n\2\2\u037c\u0094\3\2\2\2\u037d\u037e") + buf.write("\7x\2\2\u037e\u037f\7q\2\2\u037f\u0380\7k\2\2\u0380\u0381") + buf.write("\7f\2\2\u0381\u0096\3\2\2\2\u0382\u0383\7x\2\2\u0383\u0384") + buf.write("\7q\2\2\u0384\u0385\7n\2\2\u0385\u0386\7c\2\2\u0386\u0387") + buf.write("\7v\2\2\u0387\u0388\7k\2\2\u0388\u0389\7n\2\2\u0389\u038a") + buf.write("\7g\2\2\u038a\u0098\3\2\2\2\u038b\u038c\7y\2\2\u038c\u038d") + buf.write("\7e\2\2\u038d\u038e\7j\2\2\u038e\u038f\7c\2\2\u038f\u0390") + buf.write("\7t\2\2\u0390\u0391\7a\2\2\u0391\u0392\7v\2\2\u0392\u009a") + buf.write("\3\2\2\2\u0393\u0394\7y\2\2\u0394\u0395\7j\2\2\u0395\u0396") + buf.write("\7k\2\2\u0396\u0397\7n\2\2\u0397\u0398\7g\2\2\u0398\u009c") + buf.write("\3\2\2\2\u0399\u039a\7*\2\2\u039a\u009e\3\2\2\2\u039b") + buf.write("\u039c\7+\2\2\u039c\u00a0\3\2\2\2\u039d\u039e\7]\2\2\u039e") + buf.write("\u00a2\3\2\2\2\u039f\u03a0\7_\2\2\u03a0\u00a4\3\2\2\2") + buf.write("\u03a1\u03a2\7}\2\2\u03a2\u00a6\3\2\2\2\u03a3\u03a4\7") + 
buf.write("\177\2\2\u03a4\u00a8\3\2\2\2\u03a5\u03a6\7-\2\2\u03a6") + buf.write("\u00aa\3\2\2\2\u03a7\u03a8\7/\2\2\u03a8\u00ac\3\2\2\2") + buf.write("\u03a9\u03aa\7,\2\2\u03aa\u00ae\3\2\2\2\u03ab\u03ac\7") + buf.write("\61\2\2\u03ac\u00b0\3\2\2\2\u03ad\u03ae\7\'\2\2\u03ae") + buf.write("\u00b2\3\2\2\2\u03af\u03b0\7`\2\2\u03b0\u00b4\3\2\2\2") + buf.write("\u03b1\u03b2\7(\2\2\u03b2\u00b6\3\2\2\2\u03b3\u03b4\7") + buf.write("~\2\2\u03b4\u00b8\3\2\2\2\u03b5\u03b6\7\u0080\2\2\u03b6") + buf.write("\u00ba\3\2\2\2\u03b7\u03b8\7#\2\2\u03b8\u00bc\3\2\2\2") + buf.write("\u03b9\u03ba\7?\2\2\u03ba\u00be\3\2\2\2\u03bb\u03bc\7") + buf.write(">\2\2\u03bc\u00c0\3\2\2\2\u03bd\u03be\7@\2\2\u03be\u00c2") + buf.write("\3\2\2\2\u03bf\u03c0\7-\2\2\u03c0\u03c1\7?\2\2\u03c1\u00c4") + buf.write("\3\2\2\2\u03c2\u03c3\7/\2\2\u03c3\u03c4\7?\2\2\u03c4\u00c6") + buf.write("\3\2\2\2\u03c5\u03c6\7,\2\2\u03c6\u03c7\7?\2\2\u03c7\u00c8") + buf.write("\3\2\2\2\u03c8\u03c9\7\61\2\2\u03c9\u03ca\7?\2\2\u03ca") + buf.write("\u00ca\3\2\2\2\u03cb\u03cc\7\'\2\2\u03cc\u03cd\7?\2\2") + buf.write("\u03cd\u00cc\3\2\2\2\u03ce\u03cf\7`\2\2\u03cf\u03d0\7") + buf.write("?\2\2\u03d0\u00ce\3\2\2\2\u03d1\u03d2\7(\2\2\u03d2\u03d3") + buf.write("\7?\2\2\u03d3\u00d0\3\2\2\2\u03d4\u03d5\7~\2\2\u03d5\u03d6") + buf.write("\7?\2\2\u03d6\u00d2\3\2\2\2\u03d7\u03d8\7>\2\2\u03d8\u03d9") + buf.write("\7>\2\2\u03d9\u00d4\3\2\2\2\u03da\u03db\7>\2\2\u03db\u03dc") + buf.write("\7>\2\2\u03dc\u03dd\7?\2\2\u03dd\u00d6\3\2\2\2\u03de\u03df") + buf.write("\7?\2\2\u03df\u03e0\7?\2\2\u03e0\u00d8\3\2\2\2\u03e1\u03e2") + buf.write("\7#\2\2\u03e2\u03e3\7?\2\2\u03e3\u00da\3\2\2\2\u03e4\u03e5") + buf.write("\7>\2\2\u03e5\u03e6\7?\2\2\u03e6\u00dc\3\2\2\2\u03e7\u03e8") + buf.write("\7@\2\2\u03e8\u03e9\7?\2\2\u03e9\u00de\3\2\2\2\u03ea\u03eb") + buf.write("\7(\2\2\u03eb\u03ec\7(\2\2\u03ec\u00e0\3\2\2\2\u03ed\u03ee") + buf.write("\7~\2\2\u03ee\u03ef\7~\2\2\u03ef\u00e2\3\2\2\2\u03f0\u03f1") + 
buf.write("\7-\2\2\u03f1\u03f2\7-\2\2\u03f2\u00e4\3\2\2\2\u03f3\u03f4") + buf.write("\7/\2\2\u03f4\u03f5\7/\2\2\u03f5\u00e6\3\2\2\2\u03f6\u03f7") + buf.write("\7.\2\2\u03f7\u00e8\3\2\2\2\u03f8\u03f9\7/\2\2\u03f9\u03fa") + buf.write("\7@\2\2\u03fa\u03fb\7,\2\2\u03fb\u00ea\3\2\2\2\u03fc\u03fd") + buf.write("\7/\2\2\u03fd\u03fe\7@\2\2\u03fe\u00ec\3\2\2\2\u03ff\u0400") + buf.write("\7A\2\2\u0400\u00ee\3\2\2\2\u0401\u0402\7<\2\2\u0402\u00f0") + buf.write("\3\2\2\2\u0403\u0404\7<\2\2\u0404\u0405\7<\2\2\u0405\u00f2") + buf.write("\3\2\2\2\u0406\u0407\7=\2\2\u0407\u00f4\3\2\2\2\u0408") + buf.write("\u0409\7\60\2\2\u0409\u00f6\3\2\2\2\u040a\u040b\7\60\2") + buf.write("\2\u040b\u040c\7,\2\2\u040c\u00f8\3\2\2\2\u040d\u040e") + buf.write("\7\60\2\2\u040e\u040f\7\60\2\2\u040f\u0410\7\60\2\2\u0410") + buf.write("\u00fa\3\2\2\2\u0411\u0412\5\u0115\u008b\2\u0412\u0413") + buf.write("\5\u0115\u008b\2\u0413\u0414\5\u0115\u008b\2\u0414\u0415") + buf.write("\5\u0115\u008b\2\u0415\u00fc\3\2\2\2\u0416\u0417\7^\2") + buf.write("\2\u0417\u0418\7w\2\2\u0418\u0419\3\2\2\2\u0419\u0421") + buf.write("\5\u00fb~\2\u041a\u041b\7^\2\2\u041b\u041c\7W\2\2\u041c") + buf.write("\u041d\3\2\2\2\u041d\u041e\5\u00fb~\2\u041e\u041f\5\u00fb") + buf.write("~\2\u041f\u0421\3\2\2\2\u0420\u0416\3\2\2\2\u0420\u041a") + buf.write("\3\2\2\2\u0421\u00fe\3\2\2\2\u0422\u0427\5\u0101\u0081") + buf.write("\2\u0423\u0426\5\u0101\u0081\2\u0424\u0426\5\u0105\u0083") + buf.write("\2\u0425\u0423\3\2\2\2\u0425\u0424\3\2\2\2\u0426\u0429") + buf.write("\3\2\2\2\u0427\u0425\3\2\2\2\u0427\u0428\3\2\2\2\u0428") + buf.write("\u0100\3\2\2\2\u0429\u0427\3\2\2\2\u042a\u042d\5\u0103") + buf.write("\u0082\2\u042b\u042d\5\u00fd\177\2\u042c\u042a\3\2\2\2") + buf.write("\u042c\u042b\3\2\2\2\u042d\u0102\3\2\2\2\u042e\u042f\t") + buf.write("\3\2\2\u042f\u0104\3\2\2\2\u0430\u0431\t\4\2\2\u0431\u0106") + buf.write("\3\2\2\2\u0432\u0434\5\u0109\u0085\2\u0433\u0435\5\u0119") + buf.write("\u008d\2\u0434\u0433\3\2\2\2\u0434\u0435\3\2\2\2\u0435") 
+ buf.write("\u0443\3\2\2\2\u0436\u0438\5\u010b\u0086\2\u0437\u0439") + buf.write("\5\u0119\u008d\2\u0438\u0437\3\2\2\2\u0438\u0439\3\2\2") + buf.write("\2\u0439\u0443\3\2\2\2\u043a\u043c\5\u010d\u0087\2\u043b") + buf.write("\u043d\5\u0119\u008d\2\u043c\u043b\3\2\2\2\u043c\u043d") + buf.write("\3\2\2\2\u043d\u0443\3\2\2\2\u043e\u0440\5\u010f\u0088") + buf.write("\2\u043f\u0441\5\u0119\u008d\2\u0440\u043f\3\2\2\2\u0440") + buf.write("\u0441\3\2\2\2\u0441\u0443\3\2\2\2\u0442\u0432\3\2\2\2") + buf.write("\u0442\u0436\3\2\2\2\u0442\u043a\3\2\2\2\u0442\u043e\3") + buf.write("\2\2\2\u0443\u0108\3\2\2\2\u0444\u044b\5\u0111\u0089\2") + buf.write("\u0445\u0447\7)\2\2\u0446\u0445\3\2\2\2\u0446\u0447\3") + buf.write("\2\2\2\u0447\u0448\3\2\2\2\u0448\u044a\5\u0105\u0083\2") + buf.write("\u0449\u0446\3\2\2\2\u044a\u044d\3\2\2\2\u044b\u0449\3") + buf.write("\2\2\2\u044b\u044c\3\2\2\2\u044c\u010a\3\2\2\2\u044d\u044b") + buf.write("\3\2\2\2\u044e\u0455\7\62\2\2\u044f\u0451\7)\2\2\u0450") + buf.write("\u044f\3\2\2\2\u0450\u0451\3\2\2\2\u0451\u0452\3\2\2\2") + buf.write("\u0452\u0454\5\u0113\u008a\2\u0453\u0450\3\2\2\2\u0454") + buf.write("\u0457\3\2\2\2\u0455\u0453\3\2\2\2\u0455\u0456\3\2\2\2") + buf.write("\u0456\u010c\3\2\2\2\u0457\u0455\3\2\2\2\u0458\u0459\7") + buf.write("\62\2\2\u0459\u045d\7z\2\2\u045a\u045b\7\62\2\2\u045b") + buf.write("\u045d\7Z\2\2\u045c\u0458\3\2\2\2\u045c\u045a\3\2\2\2") + buf.write("\u045d\u045e\3\2\2\2\u045e\u0465\5\u0115\u008b\2\u045f") + buf.write("\u0461\7)\2\2\u0460\u045f\3\2\2\2\u0460\u0461\3\2\2\2") + buf.write("\u0461\u0462\3\2\2\2\u0462\u0464\5\u0115\u008b\2\u0463") + buf.write("\u0460\3\2\2\2\u0464\u0467\3\2\2\2\u0465\u0463\3\2\2\2") + buf.write("\u0465\u0466\3\2\2\2\u0466\u010e\3\2\2\2\u0467\u0465\3") + buf.write("\2\2\2\u0468\u0469\7\62\2\2\u0469\u046d\7d\2\2\u046a\u046b") + buf.write("\7\62\2\2\u046b\u046d\7D\2\2\u046c\u0468\3\2\2\2\u046c") + buf.write("\u046a\3\2\2\2\u046d\u046e\3\2\2\2\u046e\u0475\5\u0117") + 
buf.write("\u008c\2\u046f\u0471\7)\2\2\u0470\u046f\3\2\2\2\u0470") + buf.write("\u0471\3\2\2\2\u0471\u0472\3\2\2\2\u0472\u0474\5\u0117") + buf.write("\u008c\2\u0473\u0470\3\2\2\2\u0474\u0477\3\2\2\2\u0475") + buf.write("\u0473\3\2\2\2\u0475\u0476\3\2\2\2\u0476\u0110\3\2\2\2") + buf.write("\u0477\u0475\3\2\2\2\u0478\u0479\t\5\2\2\u0479\u0112\3") + buf.write("\2\2\2\u047a\u047b\t\6\2\2\u047b\u0114\3\2\2\2\u047c\u047d") + buf.write("\t\7\2\2\u047d\u0116\3\2\2\2\u047e\u047f\t\b\2\2\u047f") + buf.write("\u0118\3\2\2\2\u0480\u0482\5\u011b\u008e\2\u0481\u0483") + buf.write("\5\u011d\u008f\2\u0482\u0481\3\2\2\2\u0482\u0483\3\2\2") + buf.write("\2\u0483\u0491\3\2\2\2\u0484\u0486\5\u011b\u008e\2\u0485") + buf.write("\u0487\5\u011f\u0090\2\u0486\u0485\3\2\2\2\u0486\u0487") + buf.write("\3\2\2\2\u0487\u0491\3\2\2\2\u0488\u048a\5\u011d\u008f") + buf.write("\2\u0489\u048b\5\u011b\u008e\2\u048a\u0489\3\2\2\2\u048a") + buf.write("\u048b\3\2\2\2\u048b\u0491\3\2\2\2\u048c\u048e\5\u011f") + buf.write("\u0090\2\u048d\u048f\5\u011b\u008e\2\u048e\u048d\3\2\2") + buf.write("\2\u048e\u048f\3\2\2\2\u048f\u0491\3\2\2\2\u0490\u0480") + buf.write("\3\2\2\2\u0490\u0484\3\2\2\2\u0490\u0488\3\2\2\2\u0490") + buf.write("\u048c\3\2\2\2\u0491\u011a\3\2\2\2\u0492\u0493\t\t\2\2") + buf.write("\u0493\u011c\3\2\2\2\u0494\u0495\t\n\2\2\u0495\u011e\3") + buf.write("\2\2\2\u0496\u0497\7n\2\2\u0497\u049b\7n\2\2\u0498\u0499") + buf.write("\7N\2\2\u0499\u049b\7N\2\2\u049a\u0496\3\2\2\2\u049a\u0498") + buf.write("\3\2\2\2\u049b\u0120\3\2\2\2\u049c\u049e\7)\2\2\u049d") + buf.write("\u049f\5\u0123\u0092\2\u049e\u049d\3\2\2\2\u049f\u04a0") + buf.write("\3\2\2\2\u04a0\u049e\3\2\2\2\u04a0\u04a1\3\2\2\2\u04a1") + buf.write("\u04a2\3\2\2\2\u04a2\u04a3\7)\2\2\u04a3\u04c0\3\2\2\2") + buf.write("\u04a4\u04a5\7w\2\2\u04a5\u04a7\7)\2\2\u04a6\u04a8\5\u0123") + buf.write("\u0092\2\u04a7\u04a6\3\2\2\2\u04a8\u04a9\3\2\2\2\u04a9") + buf.write("\u04a7\3\2\2\2\u04a9\u04aa\3\2\2\2\u04aa\u04ab\3\2\2\2") + 
buf.write("\u04ab\u04ac\7)\2\2\u04ac\u04c0\3\2\2\2\u04ad\u04ae\7") + buf.write("W\2\2\u04ae\u04b0\7)\2\2\u04af\u04b1\5\u0123\u0092\2\u04b0") + buf.write("\u04af\3\2\2\2\u04b1\u04b2\3\2\2\2\u04b2\u04b0\3\2\2\2") + buf.write("\u04b2\u04b3\3\2\2\2\u04b3\u04b4\3\2\2\2\u04b4\u04b5\7") + buf.write(")\2\2\u04b5\u04c0\3\2\2\2\u04b6\u04b7\7N\2\2\u04b7\u04b9") + buf.write("\7)\2\2\u04b8\u04ba\5\u0123\u0092\2\u04b9\u04b8\3\2\2") + buf.write("\2\u04ba\u04bb\3\2\2\2\u04bb\u04b9\3\2\2\2\u04bb\u04bc") + buf.write("\3\2\2\2\u04bc\u04bd\3\2\2\2\u04bd\u04be\7)\2\2\u04be") + buf.write("\u04c0\3\2\2\2\u04bf\u049c\3\2\2\2\u04bf\u04a4\3\2\2\2") + buf.write("\u04bf\u04ad\3\2\2\2\u04bf\u04b6\3\2\2\2\u04c0\u0122\3") + buf.write("\2\2\2\u04c1\u04c5\n\13\2\2\u04c2\u04c5\5\u0125\u0093") + buf.write("\2\u04c3\u04c5\5\u00fd\177\2\u04c4\u04c1\3\2\2\2\u04c4") + buf.write("\u04c2\3\2\2\2\u04c4\u04c3\3\2\2\2\u04c5\u0124\3\2\2\2") + buf.write("\u04c6\u04ca\5\u0127\u0094\2\u04c7\u04ca\5\u0129\u0095") + buf.write("\2\u04c8\u04ca\5\u012b\u0096\2\u04c9\u04c6\3\2\2\2\u04c9") + buf.write("\u04c7\3\2\2\2\u04c9\u04c8\3\2\2\2\u04ca\u0126\3\2\2\2") + buf.write("\u04cb\u04cc\7^\2\2\u04cc\u04e2\7)\2\2\u04cd\u04ce\7^") + buf.write("\2\2\u04ce\u04e2\7$\2\2\u04cf\u04d0\7^\2\2\u04d0\u04e2") + buf.write("\7A\2\2\u04d1\u04d2\7^\2\2\u04d2\u04e2\7^\2\2\u04d3\u04d4") + buf.write("\7^\2\2\u04d4\u04e2\7c\2\2\u04d5\u04d6\7^\2\2\u04d6\u04e2") + buf.write("\7d\2\2\u04d7\u04d8\7^\2\2\u04d8\u04e2\7h\2\2\u04d9\u04da") + buf.write("\7^\2\2\u04da\u04e2\7p\2\2\u04db\u04dc\7^\2\2\u04dc\u04e2") + buf.write("\7t\2\2\u04dd\u04de\7^\2\2\u04de\u04e2\7v\2\2\u04df\u04e0") + buf.write("\7^\2\2\u04e0\u04e2\7x\2\2\u04e1\u04cb\3\2\2\2\u04e1\u04cd") + buf.write("\3\2\2\2\u04e1\u04cf\3\2\2\2\u04e1\u04d1\3\2\2\2\u04e1") + buf.write("\u04d3\3\2\2\2\u04e1\u04d5\3\2\2\2\u04e1\u04d7\3\2\2\2") + buf.write("\u04e1\u04d9\3\2\2\2\u04e1\u04db\3\2\2\2\u04e1\u04dd\3") + buf.write("\2\2\2\u04e1\u04df\3\2\2\2\u04e2\u0128\3\2\2\2\u04e3\u04e4") + 
buf.write("\7^\2\2\u04e4\u04ef\5\u0113\u008a\2\u04e5\u04e6\7^\2\2") + buf.write("\u04e6\u04e7\5\u0113\u008a\2\u04e7\u04e8\5\u0113\u008a") + buf.write("\2\u04e8\u04ef\3\2\2\2\u04e9\u04ea\7^\2\2\u04ea\u04eb") + buf.write("\5\u0113\u008a\2\u04eb\u04ec\5\u0113\u008a\2\u04ec\u04ed") + buf.write("\5\u0113\u008a\2\u04ed\u04ef\3\2\2\2\u04ee\u04e3\3\2\2") + buf.write("\2\u04ee\u04e5\3\2\2\2\u04ee\u04e9\3\2\2\2\u04ef\u012a") + buf.write("\3\2\2\2\u04f0\u04f1\7^\2\2\u04f1\u04f2\7z\2\2\u04f2\u04f4") + buf.write("\3\2\2\2\u04f3\u04f5\5\u0115\u008b\2\u04f4\u04f3\3\2\2") + buf.write("\2\u04f5\u04f6\3\2\2\2\u04f6\u04f4\3\2\2\2\u04f6\u04f7") + buf.write("\3\2\2\2\u04f7\u012c\3\2\2\2\u04f8\u04fa\5\u012f\u0098") + buf.write("\2\u04f9\u04fb\5\u0131\u0099\2\u04fa\u04f9\3\2\2\2\u04fa") + buf.write("\u04fb\3\2\2\2\u04fb\u04fd\3\2\2\2\u04fc\u04fe\5\u0137") + buf.write("\u009c\2\u04fd\u04fc\3\2\2\2\u04fd\u04fe\3\2\2\2\u04fe") + buf.write("\u0505\3\2\2\2\u04ff\u0500\5\u0135\u009b\2\u0500\u0502") + buf.write("\5\u0131\u0099\2\u0501\u0503\5\u0137\u009c\2\u0502\u0501") + buf.write("\3\2\2\2\u0502\u0503\3\2\2\2\u0503\u0505\3\2\2\2\u0504") + buf.write("\u04f8\3\2\2\2\u0504\u04ff\3\2\2\2\u0505\u012e\3\2\2\2") + buf.write("\u0506\u0508\5\u0135\u009b\2\u0507\u0506\3\2\2\2\u0507") + buf.write("\u0508\3\2\2\2\u0508\u0509\3\2\2\2\u0509\u050a\7\60\2") + buf.write("\2\u050a\u050f\5\u0135\u009b\2\u050b\u050c\5\u0135\u009b") + buf.write("\2\u050c\u050d\7\60\2\2\u050d\u050f\3\2\2\2\u050e\u0507") + buf.write("\3\2\2\2\u050e\u050b\3\2\2\2\u050f\u0130\3\2\2\2\u0510") + buf.write("\u0512\7g\2\2\u0511\u0513\5\u0133\u009a\2\u0512\u0511") + buf.write("\3\2\2\2\u0512\u0513\3\2\2\2\u0513\u0514\3\2\2\2\u0514") + buf.write("\u051b\5\u0135\u009b\2\u0515\u0517\7G\2\2\u0516\u0518") + buf.write("\5\u0133\u009a\2\u0517\u0516\3\2\2\2\u0517\u0518\3\2\2") + buf.write("\2\u0518\u0519\3\2\2\2\u0519\u051b\5\u0135\u009b\2\u051a") + buf.write("\u0510\3\2\2\2\u051a\u0515\3\2\2\2\u051b\u0132\3\2\2\2") + 
buf.write("\u051c\u051d\t\f\2\2\u051d\u0134\3\2\2\2\u051e\u0525\5") + buf.write("\u0105\u0083\2\u051f\u0521\7)\2\2\u0520\u051f\3\2\2\2") + buf.write("\u0520\u0521\3\2\2\2\u0521\u0522\3\2\2\2\u0522\u0524\5") + buf.write("\u0105\u0083\2\u0523\u0520\3\2\2\2\u0524\u0527\3\2\2\2") + buf.write("\u0525\u0523\3\2\2\2\u0525\u0526\3\2\2\2\u0526\u0136\3") + buf.write("\2\2\2\u0527\u0525\3\2\2\2\u0528\u0529\t\r\2\2\u0529\u0138") + buf.write("\3\2\2\2\u052a\u052c\5\u013b\u009e\2\u052b\u052a\3\2\2") + buf.write("\2\u052b\u052c\3\2\2\2\u052c\u052d\3\2\2\2\u052d\u0531") + buf.write("\7$\2\2\u052e\u0530\5\u013d\u009f\2\u052f\u052e\3\2\2") + buf.write("\2\u0530\u0533\3\2\2\2\u0531\u052f\3\2\2\2\u0531\u0532") + buf.write("\3\2\2\2\u0532\u0534\3\2\2\2\u0533\u0531\3\2\2\2\u0534") + buf.write("\u053b\7$\2\2\u0535\u0537\5\u013b\u009e\2\u0536\u0535") + buf.write("\3\2\2\2\u0536\u0537\3\2\2\2\u0537\u0538\3\2\2\2\u0538") + buf.write("\u0539\7T\2\2\u0539\u053b\5\u013f\u00a0\2\u053a\u052b") + buf.write("\3\2\2\2\u053a\u0536\3\2\2\2\u053b\u013a\3\2\2\2\u053c") + buf.write("\u053d\7w\2\2\u053d\u0540\7:\2\2\u053e\u0540\t\16\2\2") + buf.write("\u053f\u053c\3\2\2\2\u053f\u053e\3\2\2\2\u0540\u013c\3") + buf.write("\2\2\2\u0541\u0545\n\17\2\2\u0542\u0545\5\u0125\u0093") + buf.write("\2\u0543\u0545\5\u00fd\177\2\u0544\u0541\3\2\2\2\u0544") + buf.write("\u0542\3\2\2\2\u0544\u0543\3\2\2\2\u0545\u013e\3\2\2\2") + buf.write("\u0546\u054a\7$\2\2\u0547\u0549\13\2\2\2\u0548\u0547\3") + buf.write("\2\2\2\u0549\u054c\3\2\2\2\u054a\u054b\3\2\2\2\u054a\u0548") + buf.write("\3\2\2\2\u054b\u054d\3\2\2\2\u054c\u054a\3\2\2\2\u054d") + buf.write("\u0551\7*\2\2\u054e\u0550\13\2\2\2\u054f\u054e\3\2\2\2") + buf.write("\u0550\u0553\3\2\2\2\u0551\u0552\3\2\2\2\u0551\u054f\3") + buf.write("\2\2\2\u0552\u0554\3\2\2\2\u0553\u0551\3\2\2\2\u0554\u0558") + buf.write("\7+\2\2\u0555\u0557\13\2\2\2\u0556\u0555\3\2\2\2\u0557") + buf.write("\u055a\3\2\2\2\u0558\u0559\3\2\2\2\u0558\u0556\3\2\2\2") + 
buf.write("\u0559\u055b\3\2\2\2\u055a\u0558\3\2\2\2\u055b\u055c\7") + buf.write("$\2\2\u055c\u0140\3\2\2\2\u055d\u055e\5\u0109\u0085\2") + buf.write("\u055e\u055f\5\u0149\u00a5\2\u055f\u056a\3\2\2\2\u0560") + buf.write("\u0561\5\u010b\u0086\2\u0561\u0562\5\u0149\u00a5\2\u0562") + buf.write("\u056a\3\2\2\2\u0563\u0564\5\u010d\u0087\2\u0564\u0565") + buf.write("\5\u0149\u00a5\2\u0565\u056a\3\2\2\2\u0566\u0567\5\u010f") + buf.write("\u0088\2\u0567\u0568\5\u0149\u00a5\2\u0568\u056a\3\2\2") + buf.write("\2\u0569\u055d\3\2\2\2\u0569\u0560\3\2\2\2\u0569\u0563") + buf.write("\3\2\2\2\u0569\u0566\3\2\2\2\u056a\u0142\3\2\2\2\u056b") + buf.write("\u056d\5\u012f\u0098\2\u056c\u056e\5\u0131\u0099\2\u056d") + buf.write("\u056c\3\2\2\2\u056d\u056e\3\2\2\2\u056e\u056f\3\2\2\2") + buf.write("\u056f\u0570\5\u0149\u00a5\2\u0570\u0576\3\2\2\2\u0571") + buf.write("\u0572\5\u0135\u009b\2\u0572\u0573\5\u0131\u0099\2\u0573") + buf.write("\u0574\5\u0149\u00a5\2\u0574\u0576\3\2\2\2\u0575\u056b") + buf.write("\3\2\2\2\u0575\u0571\3\2\2\2\u0576\u0144\3\2\2\2\u0577") + buf.write("\u0578\5\u0139\u009d\2\u0578\u0579\5\u0149\u00a5\2\u0579") + buf.write("\u0146\3\2\2\2\u057a\u057b\5\u0121\u0091\2\u057b\u057c") + buf.write("\5\u0149\u00a5\2\u057c\u0148\3\2\2\2\u057d\u057e\5\u00ff") + buf.write("\u0080\2\u057e\u014a\3\2\2\2\u057f\u0581\t\20\2\2\u0580") + buf.write("\u057f\3\2\2\2\u0581\u0582\3\2\2\2\u0582\u0580\3\2\2\2") + buf.write("\u0582\u0583\3\2\2\2\u0583\u0584\3\2\2\2\u0584\u0585\b") + buf.write("\u00a6\3\2\u0585\u014c\3\2\2\2\u0586\u0588\7\17\2\2\u0587") + buf.write("\u0589\7\f\2\2\u0588\u0587\3\2\2\2\u0588\u0589\3\2\2\2") + buf.write("\u0589\u058c\3\2\2\2\u058a\u058c\7\f\2\2\u058b\u0586\3") + buf.write("\2\2\2\u058b\u058a\3\2\2\2\u058c\u058d\3\2\2\2\u058d\u058e") + buf.write("\b\u00a7\3\2\u058e\u014e\3\2\2\2\u058f\u0590\7\61\2\2") + buf.write("\u0590\u0591\7,\2\2\u0591\u0595\3\2\2\2\u0592\u0594\13") + buf.write("\2\2\2\u0593\u0592\3\2\2\2\u0594\u0597\3\2\2\2\u0595\u0596") + 
buf.write("\3\2\2\2\u0595\u0593\3\2\2\2\u0596\u0598\3\2\2\2\u0597") + buf.write("\u0595\3\2\2\2\u0598\u0599\7,\2\2\u0599\u059a\7\61\2\2") + buf.write("\u059a\u059b\3\2\2\2\u059b\u059c\b\u00a8\3\2\u059c\u0150") + buf.write("\3\2\2\2\u059d\u059e\7\61\2\2\u059e\u059f\7\61\2\2\u059f") + buf.write("\u05a3\3\2\2\2\u05a0\u05a2\n\21\2\2\u05a1\u05a0\3\2\2") + buf.write("\2\u05a2\u05a5\3\2\2\2\u05a3\u05a1\3\2\2\2\u05a3\u05a4") + buf.write("\3\2\2\2\u05a4\u05a6\3\2\2\2\u05a5\u05a3\3\2\2\2\u05a6") + buf.write("\u05a7\b\u00a9\3\2\u05a7\u0152\3\2\2\2G\2\u0157\u015c") + buf.write("\u0161\u0166\u016e\u0420\u0425\u0427\u042c\u0434\u0438") + buf.write("\u043c\u0440\u0442\u0446\u044b\u0450\u0455\u045c\u0460") + buf.write("\u0465\u046c\u0470\u0475\u0482\u0486\u048a\u048e\u0490") + buf.write("\u049a\u04a0\u04a9\u04b2\u04bb\u04bf\u04c4\u04c9\u04e1") + buf.write("\u04ee\u04f6\u04fa\u04fd\u0502\u0504\u0507\u050e\u0512") + buf.write("\u0517\u051a\u0520\u0525\u052b\u0531\u0536\u053a\u053f") + buf.write("\u0544\u054a\u0551\u0558\u0569\u056d\u0575\u0582\u0588") + buf.write("\u058b\u0595\u05a3\4\2\3\2\b\2\2") + return buf.getvalue() + + +class CPP14Lexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + MultiLineMacro = 1 + Directive = 2 + Alignas = 3 + Alignof = 4 + Asm = 5 + Auto = 6 + Bool = 7 + Break = 8 + Case = 9 + Catch = 10 + Char = 11 + Char16 = 12 + Char32 = 13 + Class = 14 + Const = 15 + Constexpr = 16 + Const_cast = 17 + Continue = 18 + Decltype = 19 + Default = 20 + Delete = 21 + Do = 22 + Double = 23 + Dynamic_cast = 24 + Else = 25 + Enum = 26 + Explicit = 27 + Export = 28 + Extern = 29 + False_ = 30 + Final = 31 + Float = 32 + For = 33 + Friend = 34 + Goto = 35 + If = 36 + Inline = 37 + Int = 38 + Long = 39 + Mutable = 40 + Namespace = 41 + New = 42 + Noexcept = 43 + Nullptr = 44 + Operator = 45 + Override = 46 + Private = 47 + Protected = 48 + Public = 49 + Register = 50 + 
Reinterpret_cast = 51 + Return = 52 + Short = 53 + Signed = 54 + Sizeof = 55 + Static = 56 + Static_assert = 57 + Static_cast = 58 + Struct = 59 + Switch = 60 + Template = 61 + This = 62 + Thread_local = 63 + Throw = 64 + True_ = 65 + Try = 66 + Typedef = 67 + Typeid = 68 + Typename = 69 + Union = 70 + Unsigned = 71 + Using = 72 + Virtual = 73 + Void = 74 + Volatile = 75 + Wchar = 76 + While = 77 + LeftParen = 78 + RightParen = 79 + LeftBracket = 80 + RightBracket = 81 + LeftBrace = 82 + RightBrace = 83 + Plus = 84 + Minus = 85 + Star = 86 + Div = 87 + Mod = 88 + Caret = 89 + And = 90 + Or = 91 + Tilde = 92 + Not = 93 + Assign = 94 + Less = 95 + Greater = 96 + PlusAssign = 97 + MinusAssign = 98 + StarAssign = 99 + DivAssign = 100 + ModAssign = 101 + XorAssign = 102 + AndAssign = 103 + OrAssign = 104 + LeftShift = 105 + LeftShiftAssign = 106 + Equal = 107 + NotEqual = 108 + LessEqual = 109 + GreaterEqual = 110 + AndAnd = 111 + OrOr = 112 + PlusPlus = 113 + MinusMinus = 114 + Comma = 115 + ArrowStar = 116 + Arrow = 117 + Question = 118 + Colon = 119 + Doublecolon = 120 + Semi = 121 + Dot = 122 + DotStar = 123 + Ellipsis = 124 + Identifier = 125 + Integerliteral = 126 + Decimalliteral = 127 + Octalliteral = 128 + Hexadecimalliteral = 129 + Binaryliteral = 130 + Integersuffix = 131 + Characterliteral = 132 + Floatingliteral = 133 + Stringliteral = 134 + Userdefinedintegerliteral = 135 + Userdefinedfloatingliteral = 136 + Userdefinedstringliteral = 137 + Userdefinedcharacterliteral = 138 + Whitespace = 139 + Newline = 140 + BlockComment = 141 + LineComment = 142 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "", + "'alignas'", "'alignof'", "'asm'", "'auto'", "'bool'", "'break'", + "'case'", "'catch'", "'char'", "'char16_t'", "'char32_t'", "'class'", + "'const'", "'constexpr'", "'const_cast'", "'continue'", "'decltype'", + "'default'", "'delete'", "'do'", "'double'", "'dynamic_cast'", + "'else'", 
"'enum'", "'explicit'", "'export'", "'extern'", "'false'", + "'final'", "'float'", "'for'", "'friend'", "'goto'", "'if'", + "'inline'", "'int'", "'long'", "'mutable'", "'namespace'", "'new'", + "'noexcept'", "'nullptr'", "'operator'", "'override'", "'private'", + "'protected'", "'public'", "'register'", "'reinterpret_cast'", + "'return'", "'short'", "'signed'", "'sizeof'", "'static'", "'static_assert'", + "'static_cast'", "'struct'", "'switch'", "'template'", "'this'", + "'thread_local'", "'throw'", "'true'", "'try'", "'typedef'", + "'typeid'", "'typename'", "'union'", "'unsigned'", "'using'", + "'virtual'", "'void'", "'volatile'", "'wchar_t'", "'while'", + "'('", "')'", "'['", "']'", "'{'", "'}'", "'+'", "'-'", "'*'", + "'/'", "'%'", "'^'", "'&'", "'|'", "'~'", "'!'", "'='", "'<'", + "'>'", "'+='", "'-='", "'*='", "'/='", "'%='", "'^='", "'&='", + "'|='", "'<<'", "'<<='", "'=='", "'!='", "'<='", "'>='", "'&&'", + "'||'", "'++'", "'--'", "','", "'->*'", "'->'", "'?'", "':'", + "'::'", "';'", "'.'", "'.*'", "'...'" ] + + symbolicNames = [ "", + "MultiLineMacro", "Directive", "Alignas", "Alignof", "Asm", + "Auto", "Bool", "Break", "Case", "Catch", "Char", "Char16", + "Char32", "Class", "Const", "Constexpr", "Const_cast", "Continue", + "Decltype", "Default", "Delete", "Do", "Double", "Dynamic_cast", + "Else", "Enum", "Explicit", "Export", "Extern", "False_", "Final", + "Float", "For", "Friend", "Goto", "If", "Inline", "Int", "Long", + "Mutable", "Namespace", "New", "Noexcept", "Nullptr", "Operator", + "Override", "Private", "Protected", "Public", "Register", "Reinterpret_cast", + "Return", "Short", "Signed", "Sizeof", "Static", "Static_assert", + "Static_cast", "Struct", "Switch", "Template", "This", "Thread_local", + "Throw", "True_", "Try", "Typedef", "Typeid", "Typename", "Union", + "Unsigned", "Using", "Virtual", "Void", "Volatile", "Wchar", + "While", "LeftParen", "RightParen", "LeftBracket", "RightBracket", + "LeftBrace", "RightBrace", "Plus", "Minus", "Star", 
"Div", "Mod", + "Caret", "And", "Or", "Tilde", "Not", "Assign", "Less", "Greater", + "PlusAssign", "MinusAssign", "StarAssign", "DivAssign", "ModAssign", + "XorAssign", "AndAssign", "OrAssign", "LeftShift", "LeftShiftAssign", + "Equal", "NotEqual", "LessEqual", "GreaterEqual", "AndAnd", + "OrOr", "PlusPlus", "MinusMinus", "Comma", "ArrowStar", "Arrow", + "Question", "Colon", "Doublecolon", "Semi", "Dot", "DotStar", + "Ellipsis", "Identifier", "Integerliteral", "Decimalliteral", + "Octalliteral", "Hexadecimalliteral", "Binaryliteral", "Integersuffix", + "Characterliteral", "Floatingliteral", "Stringliteral", "Userdefinedintegerliteral", + "Userdefinedfloatingliteral", "Userdefinedstringliteral", "Userdefinedcharacterliteral", + "Whitespace", "Newline", "BlockComment", "LineComment" ] + + ruleNames = [ "MultiLineMacro", "Directive", "Alignas", "Alignof", "Asm", + "Auto", "Bool", "Break", "Case", "Catch", "Char", "Char16", + "Char32", "Class", "Const", "Constexpr", "Const_cast", + "Continue", "Decltype", "Default", "Delete", "Do", "Double", + "Dynamic_cast", "Else", "Enum", "Explicit", "Export", + "Extern", "False_", "Final", "Float", "For", "Friend", + "Goto", "If", "Inline", "Int", "Long", "Mutable", "Namespace", + "New", "Noexcept", "Nullptr", "Operator", "Override", + "Private", "Protected", "Public", "Register", "Reinterpret_cast", + "Return", "Short", "Signed", "Sizeof", "Static", "Static_assert", + "Static_cast", "Struct", "Switch", "Template", "This", + "Thread_local", "Throw", "True_", "Try", "Typedef", "Typeid", + "Typename", "Union", "Unsigned", "Using", "Virtual", "Void", + "Volatile", "Wchar", "While", "LeftParen", "RightParen", + "LeftBracket", "RightBracket", "LeftBrace", "RightBrace", + "Plus", "Minus", "Star", "Div", "Mod", "Caret", "And", + "Or", "Tilde", "Not", "Assign", "Less", "Greater", "PlusAssign", + "MinusAssign", "StarAssign", "DivAssign", "ModAssign", + "XorAssign", "AndAssign", "OrAssign", "LeftShift", "LeftShiftAssign", + "Equal", 
"NotEqual", "LessEqual", "GreaterEqual", "AndAnd", + "OrOr", "PlusPlus", "MinusMinus", "Comma", "ArrowStar", + "Arrow", "Question", "Colon", "Doublecolon", "Semi", "Dot", + "DotStar", "Ellipsis", "Hexquad", "Universalcharactername", + "Identifier", "Identifiernondigit", "NONDIGIT", "DIGIT", + "Integerliteral", "Decimalliteral", "Octalliteral", "Hexadecimalliteral", + "Binaryliteral", "NONZERODIGIT", "OCTALDIGIT", "HEXADECIMALDIGIT", + "BINARYDIGIT", "Integersuffix", "Unsignedsuffix", "Longsuffix", + "Longlongsuffix", "Characterliteral", "Cchar", "Escapesequence", + "Simpleescapesequence", "Octalescapesequence", "Hexadecimalescapesequence", + "Floatingliteral", "Fractionalconstant", "Exponentpart", + "SIGN", "Digitsequence", "Floatingsuffix", "Stringliteral", + "Encodingprefix", "Schar", "Rawstring", "Userdefinedintegerliteral", + "Userdefinedfloatingliteral", "Userdefinedstringliteral", + "Userdefinedcharacterliteral", "Udsuffix", "Whitespace", + "Newline", "BlockComment", "LineComment" ] + + grammarFileName = "CPP14.g4" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.7.1") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None + + diff --git a/persper/analytics/lsp_graph_server/jsonrpcutils.py b/persper/analytics/lsp_graph_server/jsonrpcutils.py new file mode 100644 index 00000000000..d9735c132ce --- /dev/null +++ b/persper/analytics/lsp_graph_server/jsonrpcutils.py @@ -0,0 +1,39 @@ +from datetime import datetime +from jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter + + +class JsonRpcLogger(): + def __init__(self, fileName): + self._fileName = fileName + self._file = open(fileName, "wt") + + def logTX(self, message: dict): + self._file.write("{0} < {1}\n".format(datetime.now(), message)) + + def logRX(self, message: dict): + self._file.write("{0} > {1}\n".format(datetime.now(), 
message)) + + def __exit__(self, exc_type, exc_value, traceback): + self._file.close() + + +class LoggedJsonRpcStreamReader(JsonRpcStreamReader): + def __init__(self, rfile, logger: JsonRpcLogger): + super().__init__(rfile) + self._logger = logger + + def listen(self, message_consumer): + def wrapper(message): + self._logger.logRX(message) + message_consumer(message) + super().listen(wrapper) + + +class LoggedJsonRpcStreamWriter(JsonRpcStreamWriter): + def __init__(self, wfile, logger: JsonRpcLogger, **json_dumps_args): + super().__init__(wfile, **json_dumps_args) + self._logger = logger + + def write(self, message): + self._logger.logTX(message) + super().write(message) diff --git a/persper/analytics/lsp_graph_server/languageclient/__init__.py b/persper/analytics/lsp_graph_server/languageclient/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/persper/analytics/lsp_graph_server/languageclient/lspclient.py b/persper/analytics/lsp_graph_server/languageclient/lspclient.py new file mode 100644 index 00000000000..e349a318217 --- /dev/null +++ b/persper/analytics/lsp_graph_server/languageclient/lspclient.py @@ -0,0 +1,73 @@ +""" +LSP client implementation. 
+""" +import logging +import threading + +from jsonrpc.dispatchers import MethodDispatcher +from jsonrpc.endpoint import Endpoint +from jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter + +from languageclient.lspcontract import MessageType +from languageclient.lspserver import LspServerStub +from jsonrpcutils import LoggedJsonRpcStreamReader, LoggedJsonRpcStreamWriter, JsonRpcLogger + +_logger = logging.getLogger(__name__) + + +class LspClient(MethodDispatcher): + def __init__(self, rx, tx, logFile: str = None): + super().__init__() + self._rpclogger = JsonRpcLogger(logFile) if logFile else None + self._streamReader = LoggedJsonRpcStreamReader(rx, self._rpclogger) if logFile else JsonRpcStreamReader(rx) + self._streamWriter = LoggedJsonRpcStreamWriter(tx, self._rpclogger) if logFile else JsonRpcStreamWriter(tx) + self._nextJsonRpcMessageId = 0 + # Some language server, e.g. cquery, only supports numerical request Ids. + self._endpoint = Endpoint(self, self._streamWriter.write, self.nextJsonRpcMessageId) + self._listenerThread = None + self._shutdownEvent = threading.Event() + self._serverStub = LspServerStub(self._endpoint) + + def nextJsonRpcMessageId(self): + self._nextJsonRpcMessageId += 1 + if self._nextJsonRpcMessageId >= 0x7FFFFFFF: + self._nextJsonRpcMessageId = 0 + return str(self._nextJsonRpcMessageId) + + def start(self): + self._listenerThread = threading.Thread(target=self._startListener, daemon=True) + self._listenerThread.start() + + def stop(self): + self._endpoint.shutdown() + self._streamReader.close() + self._streamWriter.close() + self._shutdownEvent.set() + self._listenerThread.join(timeout=30) + + def initializeServer(self): + raise NotImplementedError() + + @property + def server(self): + return self._serverStub + + def _startListener(self): + self._streamReader.listen(self._endpoint.consume) + + def m_window__show_message(self, type: MessageType, message: str): + type = MessageType(type) + _logger.info(type, message) + + def 
m_window__show_message_request(self, type: MessageType, message: str, actions): + type = MessageType(type) + print(type, message, actions) + return actions[0]["title"] + + def m_window__log_message(self, type: MessageType, message: str): + type = MessageType(type) + _logger.info(type, message) + + def m_text_document__publish_diagnostics(self, uri: str, diagnostics): + # ignore all diagnostic information for now. + pass diff --git a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py new file mode 100644 index 00000000000..3f3d9914c9f --- /dev/null +++ b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py @@ -0,0 +1,304 @@ +import pathlib +from enum import Enum +from typing import Tuple, Union +import logging +import os + +_logger = logging.getLogger(__name__) + + +class MessageType(Enum): + Error = 1 + Warning = 2 + Info = 3 + Log = 4 + + +class SymbolKind(Enum): + Unknown = 0 + File = 1 + Module = 2 + Namespace = 3 + Package = 4 + Class = 5 + Method = 6 + Property = 7 + Field = 8 + Constructor = 9 + Enum = 10 + Interface = 11 + Function = 12 + Variable = 13 + Constant = 14 + String = 15 + Number = 16 + Boolean = 17 + Array = 18 + Object = 19 + Key = 20 + Null = 21 + EnumMember = 22 + Struct = 23 + Event = 24 + Operator = 25 + TypeParameter = 26 + + # cquery extensions + # See also https://github.com/Microsoft/language-server-protocol/issues/344 + # for new SymbolKind clang/Index/IndexSymbol.h clang::index::SymbolKind + TypeAlias = 252 + Parameter = 253 + StaticMethod = 254 + Macro = 255 + + +class CompletionItemKind(Enum): + Text = 1 + Method = 2 + Function = 3 + Constructor = 4 + Field = 5 + Variable = 6 + Class = 7 + Interface = 8 + Module = 9 + Property = 10 + Unit = 11 + Value = 12 + Enum = 13 + Keyword = 14 + Snippet = 15 + Color = 16 + File = 17 + Reference = 18 + Folder = 19 + EnumMember = 20 + Constant = 21 + Struct = 22 + Event = 23 + Operator = 24 + 
TypeParameter = 25 + + +class LspContractObject: + def __init__(self): + pass + + def __repr__(self): + return self.__str__() + + +class Position(LspContractObject): + """ + Line position in a document (zero-based). + """ + + def __init__(self, line: int, character: int): + self.line = line + self.character = character + + def __str__(self): + return str(self.line) + "," + str(self.character) + + def __eq__(self, other: "Position"): + return self.line == other.line and self.character == other.character + + def __ne__(self, other: "Position"): + return self.line != other.line or self.character != other.character + + def __le__(self, other: "Position"): + return self.line < other.line or self.line == other.line and self.character <= other.character + + def __lt__(self, other: "Position"): + return self.line < other.line or self.line == other.line and self.character < other.character + + def toTuple(self): + return (self.line, self.character) + + def toDict(self): + return {"line": self.line, "character": self.character} + + @staticmethod + def fromDict(d: dict): + return Position(d["line"], d["character"]) + + @staticmethod + def parse(expr: Union[Tuple[int, int], "Position"]): + if isinstance(expr, Position): + return expr + if isinstance(expr, (list, tuple)): + return Position(expr[0], expr[1]) + raise TypeError("Invalid expr type.") + + +class Range(LspContractObject): + """ + A range in a text document expressed as (zero-based) start and end positions. + """ + + def __init__(self, start: Position, end: Position): + self.start = start + self.end = end + + def __str__(self): + return str(self.start) + "-" + str(self.end) + + def toDict(self): + return {"start": self.start.toDict(), + "end": self.end.toDict()} + + @staticmethod + def fromDict(d: dict): + return Range(Position.fromDict(d["start"]), Position.fromDict(d["end"])) + + +class Location(LspContractObject): + """ + Represents a location inside a resource, such as a line inside a text file. 
+ """ + + def __init__(self, uri: str, range: Range): + self.uri = uri + self.range = range + + def __str__(self): + return str(self.uri) + ":" + str(self.range) + + def toDict(self): + return {"uri": self.uri, "range": self.range.toDict()} + + @staticmethod + def fromDict(d: dict): + return Location(d["uri"], Range.fromDict(d["range"])) + + +class TextDocument(LspContractObject): + """ + An item to transfer a text document from the client to the server. + """ + + def __init__(self, uri: str, languageId: str, version: int, text: str): + self.uri = uri + self.languageId = languageId + self.version = version + self.text = text + + def __str__(self): + return str.format("{0};{1};[{2}]", self.uri, self.languageId, self.version) + + def toDict(self): + return {"uri": self.uri, "languageId": self.languageId, "version": self.version, "text": self.text} + + @staticmethod + def fromDict(d: dict): + return TextDocument(d["uri"], d["languageId"], d["version"], d["text"]) + + @staticmethod + def loadFile(fileName: str, languageId: str, version: int = 1): + content = None + try: + with open(fileName, "rt", encoding="utf-8", errors="replace") as file: + content = file.read() + return TextDocument(TextDocument.fileNameToUri(fileName), languageId, version, content) + except Exception as ex: + raise ValueError("Cannot load from {0}.".format(fileName)) from ex + + @staticmethod + def fileNameToUri(fileName: str): + return pathlib.Path(fileName).as_uri() + + +class DocumentSymbol(LspContractObject): + """ + Represents programming constructs like variables, classes, interfaces etc. that appear in a document. Document symbols can be + hierarchical and they have two ranges: one that encloses its definition and one that points to its most interesting range, + e.g. the range of an identifier. 
+ """ + + def __init__(self, name: str, detail: str, kind: SymbolKind, deprecated: bool, range: Range, selectionRange: Range, children: list): + self.name = name + self.detail = detail + self.kind = kind + self.deprecated = deprecated + self.range = range + self.selectionRange = selectionRange + self.children = list(children) + + def getSymbolRange(self): + return self.selectionRange + + def __str__(self): + return self.name + "[" + self.kind + "]" + + def toDict(self): + raise NotImplementedError() + + @staticmethod + def fromDict(d: dict): + children = () + if d.get("children"): + children = (DocumentSymbol.fromDict(cd) for cd in d["children"]) + return DocumentSymbol(d["name"], d.get("detail"), SymbolKind(d["kind"] if d["kind"] else 0), + d.get("deprecated"), Range.fromDict(d["range"]), Range.fromDict(d["selectionRange"]), + children) + + +class SymbolInformation(LspContractObject): + """ + Represents information about programming constructs like variables, classes, + interfaces etc. + """ + + def __init__(self, name: str, kind: SymbolKind, deprecated: bool, location: Location, containerName: str): + self.name = name + self.kind = kind + self.deprecated = deprecated + self.location = location + self.containerName = containerName + + def getSymbolRange(self): + return self.location.range + + def __str__(self): + return self.name + "[" + self.kind + "]" + + def toDict(self): + raise NotImplementedError() + + @staticmethod + def fromDict(d: dict): + try: + return SymbolInformation(d["name"], SymbolKind(d["kind"]) if d["kind"] else None, + d.get("deprecated"), Location.fromDict(d["location"]), + d.get("containerName")) + except Exception as ex: + raise ValueError("Invalid input: {0}.".format(d)) from ex + + +class TextDocumentContentChangeEvent(LspContractObject): + """ + An event describing a change to a text document. If range and rangeLength are omitted + the new text is considered to be the full content of the document. 
+ """ + + def __init__(self, text: str, range: Range = None, rangeLength: int = None): + self.text = text + self.range = range + self.rangeLength = rangeLength + + def toDict(self): + d = {"text": self.text} + if self.range is not None: + d["range"] = self.range + if self.rangeLength is not None: + d["rangeLength"] = self.rangeLength + return d + + +class TextDocumentSaveReason(Enum): + """ + Represents reasons why a text document is saved. + """ + Manual = 1 + AfterDelay = 2 + FocusOut = 3 diff --git a/persper/analytics/lsp_graph_server/languageclient/lspserver.py b/persper/analytics/lsp_graph_server/languageclient/lspserver.py new file mode 100644 index 00000000000..e74816f317c --- /dev/null +++ b/persper/analytics/lsp_graph_server/languageclient/lspserver.py @@ -0,0 +1,202 @@ +""" +LSP server contracts. +""" +import asyncio +import os +from collections.abc import Iterable +from pathlib import Path +from typing import Iterable, List, Tuple, Union + +from jsonrpc.endpoint import Endpoint + +from .lspcontract import (DocumentSymbol, Location, Position, + SymbolInformation, TextDocument, + TextDocumentContentChangeEvent, + TextDocumentSaveReason) + +DEFAULT_CAPABILITIES = { + "workspace": { + "applyEdit": False, + "workspaceEdit": { + "documentChanges": True + }, + "didChangeConfiguration": { + "dynamicRegistration": True + }, + "didChangeWatchedFiles": { + "dynamicRegistration": True + }, + "symbol": { + "dynamicRegistration": True, + "symbolKind": { + "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26] + } + }, + "executeCommand": { + "dynamicRegistration": True + }, + "configuration": True, + "workspaceFolders": False + }, + "textDocument": { + "publishDiagnostics": None, + "synchronization": { + "dynamicRegistration": True, + "willSave": True, + "willSaveWaitUntil": True, + "didSave": True + }, + "completion": { + "dynamicRegistration": True, + "contextSupport": True, + "completionItem": { + 
"snippetSupport": True, + "commitCharactersSupport": True, + "documentationFormat": ["markdown", "plaintext"] + }, + "completionItemKind": { + "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] + } + }, + "hover": { + "dynamicRegistration": True, + "contentFormat": ["markdown", "plaintext"] + }, + "signatureHelp": { + "dynamicRegistration": True, + "signatureInformation": { + "documentationFormat": ["markdown", "plaintext"] + } + }, + "definition": { + "dynamicRegistration": True + }, + "references": { + "dynamicRegistration": True + }, + "documentHighlight": { + "dynamicRegistration": True + }, + "documentSymbol": { + "dynamicRegistration": True, + "symbolKind": { + "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26] + }, + "hierarchicalDocumentSymbolSupport": True + }, + "codeAction": { + "dynamicRegistration": True + }, + "codeLens": { + "dynamicRegistration": True + }, + "formatting": { + "dynamicRegistration": True + }, + "rangeFormatting": { + "dynamicRegistration": True + }, + "onTypeFormatting": { + "dynamicRegistration": True + }, + "rename": { + "dynamicRegistration": True + }, + "documentLink": { + "dynamicRegistration": True + }, + "typeDefinition": { + "dynamicRegistration": True + }, + "implementation": { + "dynamicRegistration": True + }, + "colorProvider": { + "dynamicRegistration": True + } + }} + + +class LspServerStub(): + def __init__(self, endpoint: Endpoint): + if not isinstance(endpoint, Endpoint): + raise TypeError("Expect Endpoint instance.") + self._endpoint = endpoint + + def request(self, method, params=None): + return asyncio.wrap_future(self._endpoint.request(method, params)) + + def notify(self, method, params=None): + self._endpoint.notify(method, params) + + def initialize(self, processId=None, rootFolder=None, initializationOptions=None, capabilities=None): + if processId is None: + processId = os.getpid() + cap = 
DEFAULT_CAPABILITIES.copy() + if capabilities: + cap.update(capabilities) + rootUri = Path(rootFolder).as_uri() + return self.request("initialize", { + "processId": processId, + "rootUri": rootUri, + "initializationOptions": initializationOptions, + "capabilities": cap + }) + + def initialized(self): + self.notify("initialized") + + def shutdown(self): + return self.request("shutdown") + + def exit(self): + self.notify("exit") + + def textDocumentDidOpen(self, document: TextDocument): + self.notify("textDocument/didOpen", {"textDocument": document.toDict()}) + + def textDocumentDidClose(self, documentUri: str): + self.notify("textDocument/didClose", {"textDocument": {"uri": documentUri}}) + + async def textDocumentGotoDefinition(self, documentUri: str, position: Union[Tuple[int, int], Position]): + result = await self.request( + "textDocument/definition", + { + "textDocument": {"uri": documentUri}, + "position": Position.parse(position).toDict() + } + ) + if not result: + return [] + if isinstance(result, Iterable): + return [Location.fromDict(r) for r in result] + return [Location.fromDict(result)] + + async def textDocumentGetSymbols(self, documentUri: str) -> List[DocumentSymbol]: + result = await self.request("textDocument/documentSymbol", {"textDocument": {"uri": documentUri}}) + if not result: + return [] + + def fromDict(d: dict): + if "location" in d: + return SymbolInformation.fromDict(d) + return DocumentSymbol.fromDict(d) + + return [fromDict(d) for d in result] + + def textDocumentDidChange(self, documentUri: str, documentVersion: int, contentChanges: Iterable[TextDocumentContentChangeEvent]): + self.notify("textDocument/didChange", {"textDocument": {"uri": documentUri, "version": documentVersion}, + "contentChanges": [c.toDict() for c in contentChanges]}) + + def textDocumentWillSave(self, documentUri: str, reason: TextDocumentSaveReason = TextDocumentSaveReason.Manual): + self.notify("textDocument/willSave", {"textDocument": {"uri": documentUri}, + 
"reason": reason.value}) + + def textDocumentDidSave(self, documentUri: str, text: str = None): + self.notify("textDocument/didSave", {"textDocument": {"uri": documentUri}, + "text": text}) + + async def textDocumentCodeLens(self, documentUri: str): + result = await self.request("textDocument/codeLens", {"textDocument": {"uri": documentUri}}) + # We call this method only to synchronize the time sequence + return result diff --git a/persper/analytics/lsp_graph_server/main.py b/persper/analytics/lsp_graph_server/main.py new file mode 100644 index 00000000000..5e05e2cf00a --- /dev/null +++ b/persper/analytics/lsp_graph_server/main.py @@ -0,0 +1,86 @@ +import asyncio +import logging +import subprocess +from os import path, sys + +from callgraph.manager import CallGraphManager +from ccls import CclsCallGraphBuilder, CclsLspClient + +# Thus you need to place cquery in rootfolder/bin/cquery, and execute ./src/main.py in root folder. +LANGUAGE_SERVER_COMMAND = "./bin/cquery --record cquerystd --log-file cquery.log --ci" # --log-all-to-stderr" +LANGUAGE_SERVER_COMMAND = "./bin/ccls -log-file=ccls.log" +SOURCE_ROOT = "./demoroot/cpp/" +CACHE_ROOT = "./demoroot/cache/" +# SOURCE_ROOT = "./demoroot/cpp-simple/" +ENTRYPOINT_PATTERN = path.join(SOURCE_ROOT, "./Eigen/src/Cholesky/*.h") +JSON_RPC_DUMP_PATH = "rpctrace.txt" + +logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s', + level=logging.INFO) +logging.getLogger('asyncio').setLevel(logging.CRITICAL) + + +async def main(): + logger = logging.getLogger() + # an simple approach to let user change entrypoint file from commandline arguments + global ENTRYPOINT_PATTERN, SOURCE_ROOT + if (len(sys.argv) == 2): + ENTRYPOINT_PATTERN = sys.argv[1] + elif (len(sys.argv) == 3): + SOURCE_ROOT, ENTRYPOINT_PATTERN = sys.argv[1], sys.argv[2] + with subprocess.Popen(LANGUAGE_SERVER_COMMAND, stdin=subprocess.PIPE, stdout=subprocess.PIPE, + creationflags=subprocess.CREATE_NEW_CONSOLE) as serverProc: + try: + 
logger.info("Started language server with PID: %d.", serverProc.pid) + client = CclsLspClient(serverProc.stdout, serverProc.stdin, JSON_RPC_DUMP_PATH) + client.start() + logger.info(await client.server.initialize( + rootFolder=path.abspath(SOURCE_ROOT), + initializationOptions={"cacheDirectory": path.abspath(CACHE_ROOT), + "diagnostics": {"onParse": False, "onType": False}, + "discoverSystemIncludes": True, + "enableCacheRead": True, + "enableCacheWrite": True, + "progressReportFrequencyMs": 500, + "clang": { + "excludeArgs": [], + "extraArgs": ["-nocudalib"], + "pathMappings": [], + "resourceDir": "" + } + })) + client.server.initialized() + + builder = CclsCallGraphBuilder(client) + builder.workspaceFilePatterns = [path.abspath(path.join(SOURCE_ROOT, "/**/*"))] + manager = CallGraphManager(builder) + await manager.buildGraph(ENTRYPOINT_PATTERN) + manager.graph.dumpTo("graph.txt") + + testFile = path.join(SOURCE_ROOT, "Utility.h") + if path.exists(testFile): + for i in range(2): + logger.info("Now we are replacing Utility.h with a single line of content.") + oldContent = builder.modifyFile(testFile, "// Removed file content.") + await manager.rebuildGraph([testFile]) + manager.graph.dumpTo("graph1.txt") + logger.info("Now we are restoring Utility.h.") + builder.modifyFile(testFile, oldContent) + await manager.rebuildGraph([testFile]) + manager.graph.dumpTo("graph2.txt") + + logger.info("Shutting down language server...") + await asyncio.wait_for(client.server.shutdown(), 10) + client.server.exit() + logger.info("Language server exited with code: %s.", serverProc.wait(10)) + finally: + if serverProc.returncode is None: + # kill server process to avoid infinite wait in Popen.__exit__ + serverProc.kill() + logger.warning("Killed language server.") + + +if __name__ == '__main__': + loop = asyncio.new_event_loop() + loop.set_debug(True) + loop.run_until_complete(main()) diff --git a/persper/analytics/lsp_graph_server/wildcards.py 
b/persper/analytics/lsp_graph_server/wildcards.py new file mode 100644 index 00000000000..542d0a76d67 --- /dev/null +++ b/persper/analytics/lsp_graph_server/wildcards.py @@ -0,0 +1,47 @@ +import os +import re + + +def translate(pat): + """ + Translate a shell PATTERN to a regular expression. + There is no way to quote meta-characters. + This version can handle **/ pattern properly, compared with fnmatch. + """ + + i, n = 0, len(pat) + res = '' + while i < n: + c = pat[i] + i = i + 1 + if c == '*': + if i < n and pat[i] == '*': + res = res + '.*?' + i = i + 1 + if i < n and pat[i] == os.sep: + i = i + 1 + else: + res = res + '[^\/]+' + elif c == '?': + res = res + '.' + elif c == '[': + j = i + if j < n and pat[j] == '!': + j = j + 1 + if j < n and pat[j] == ']': + j = j + 1 + while j < n and pat[j] != ']': + j = j + 1 + if j >= n: + res = res + '\\[' + else: + stuff = pat[i:j].replace('\\', '\\\\') + i = j + 1 + if stuff[0] == '!': + stuff = '^' + stuff[1:] + elif stuff[0] == '^': + stuff = '\\' + stuff + res = '%s[%s]' % (res, stuff) + else: + res = res + re.escape(c) + return res + '$(?ms)' From 738e32fea4de51badc159fa1ff4a81c48e04a386 Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 27 Nov 2018 00:14:00 +0800 Subject: [PATCH 02/84] Fix imports and dependency. 
--- Pipfile | 3 + Pipfile.lock | 328 +++++++++--------- .../analytics/lsp_graph_server/__init__.py | 27 ++ .../lsp_graph_server/callgraph/__init__.py | 7 +- .../lsp_graph_server/callgraph/builder.py | 10 +- persper/analytics/lsp_graph_server/ccls.py | 13 +- persper/analytics/lsp_graph_server/cquery.py | 10 +- .../languageclient/lspclient.py | 6 +- persper/analytics/lsp_graph_server/setup.cfg | 2 + 9 files changed, 227 insertions(+), 179 deletions(-) create mode 100644 persper/analytics/lsp_graph_server/__init__.py create mode 100644 persper/analytics/lsp_graph_server/setup.cfg diff --git a/Pipfile b/Pipfile index 98efd4b5ae2..5fda71a5d88 100644 --- a/Pipfile +++ b/Pipfile @@ -17,6 +17,9 @@ pytest = "*" pydot = "*" ipykernel = "*" matplotlib = "*" +antlr4-python3-runtime = "*" +python-jsonrpc-server = "*" +jsonpickle = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index a1ed5eb9a33..b672acae202 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "c903c65267c881c1458b0a6878c64ac1f9a50e46711ae1bcfadc28f77242e056" + "sha256": "d04772a318aa545b5bfd2b3281d2433c3b6fb3d9e626ee57ba5c45540f45db16" }, "pipfile-spec": 6, "requires": { @@ -16,20 +16,18 @@ ] }, "default": { - "appnope": { + "antlr4-python3-runtime": { "hashes": [ - "sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0", - "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71" + "sha256:1b26b72c4492cef310542da10bf6b2ab4aa1775618fc6003f75b55ae9eaa3fd3" ], - "markers": "sys_platform == 'darwin'", - "version": "==0.1.0" + "index": "pypi", + "version": "==4.7.1" }, "atomicwrites": { "hashes": [ "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" ], - "markers": "python_version != '3.2.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==1.2.1" 
}, "attrs": { @@ -48,10 +46,10 @@ }, "certifi": { "hashes": [ - "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638", - "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a" + "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", + "sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a" ], - "version": "==2018.8.24" + "version": "==2018.10.15" }, "chardet": { "hashes": [ @@ -60,6 +58,14 @@ ], "version": "==3.0.4" }, + "colorama": { + "hashes": [ + "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", + "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.1" + }, "cycler": { "hashes": [ "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", @@ -80,12 +86,18 @@ ], "version": "==1.0.1" }, + "future": { + "hashes": [ + "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" + ], + "version": "==0.17.1" + }, "gitdb2": { "hashes": [ - "sha256:87783b7f4a8f6b71c7fe81d32179b3c8781c1a7d6fa0c69bff2f315b00aff4f8", - "sha256:bb4c85b8a58531c51373c89f92163b92f30f81369605a67cd52d1fc21246c044" + "sha256:83361131a1836661a155172932a13c08bda2db3674e4caa32368aa6eb02f38c2", + "sha256:e3a0141c5f2a3f635c7209d56c496ebe1ad35da82fe4d3ec4aaa36278d70648a" ], - "version": "==2.0.4" + "version": "==2.0.5" }, "gitpython": { "hashes": [ @@ -104,18 +116,18 @@ }, "ipykernel": { "hashes": [ - "sha256:3e0ffdf545c0bf80d9dab6523ec6829831408c474772487aeb6eb9f0348b6a1e", - "sha256:7cd5e90bc882c13f9c5e76330cb5242280e293cbe9f1a622508762124a103a82" + "sha256:0aeb7ec277ac42cc2b59ae3d08b10909b2ec161dc6908096210527162b53675d", + "sha256:0fc0bf97920d454102168ec2008620066878848fcfca06c22b669696212e292f" ], "index": "pypi", - "version": "==5.0.0" + "version": "==5.1.0" }, "ipython": { "hashes": [ - "sha256:47b17ea874454a5c2eacc2732b04a750d260b01ba479323155ac8a39031f5535", - 
"sha256:9fed506c3772c875a3048bc134a25e6f5e997b1569b2636f6a5d891f34cbfd46" + "sha256:a5781d6934a3341a1f9acb4ea5acdc7ea0a0855e689dbe755d070ca51e995435", + "sha256:b10a7ddd03657c761fc503495bc36471c8158e3fc948573fb9fe82a7029d8efd" ], - "version": "==7.0.1" + "version": "==7.1.1" }, "ipython-genutils": { "hashes": [ @@ -138,6 +150,15 @@ ], "version": "==0.13.1" }, + "jsonpickle": { + "hashes": [ + "sha256:8b6212f1155f43ce67fa945efae6d010ed059f3ca5ed377aa070e5903d45b722", + "sha256:d43ede55b3d9b5524a8e11566ea0b11c9c8109116ef6a509a1b619d2041e7397", + "sha256:ed4adf0d14564c56023862eabfac211cf01211a20c5271896c8ab6f80c68086c" + ], + "index": "pypi", + "version": "==1.0" + }, "jupyter-client": { "hashes": [ "sha256:27befcf0446b01e29853014d6a902dd101ad7d7f94e2252b1adca17c3466b761", @@ -183,7 +204,6 @@ "sha256:efabbcd4f406b532206b8801058c8bab9e79645b9880329253ae3322b7b02cd5", "sha256:f923406e6b32c86309261b8195e24e18b6a8801df0cfc7814ac44017bfcb3939" ], - "markers": "python_version >= '2.7' and python_version != '3.0.*' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*'", "version": "==1.0.1" }, "lxml": { @@ -224,22 +244,22 @@ }, "matplotlib": { "hashes": [ - "sha256:0f738b57051e8a0f8bc8282031d0a82e9dedbd10a94fd54d4c3830d708607a8b", - "sha256:0f87d188528ff3c86286603bc13170a5932e631c0c69d9995aae86448a7d9692", - "sha256:290864f3c69d1e71d6648c9c75093db28486f1bf058b0ab2fda9d2d6814ddf19", - "sha256:70aba3a1c7dbef5a997db8afe06e256c6b67e1dc15bb16d8b55d140ea5375a8d", - "sha256:7318d11a4784c3e37f5de0c9141d18eac08565d303da7d3d557662369f2f866b", - "sha256:77c6edc4e25b36430df8a445195030abc8d5766d068b9aeed1a58a684cc0eb3b", - "sha256:91669d38938ae7b66db084e444ee5dceed09b59a6622fda10dfb021d5ce6d0dc", - "sha256:a547edc4d0ce68f3eb397ed8701314f254a0de593045ee0eecad4f1efc664951", - "sha256:b4e2333c98a7c2c1ff6eb930cd2b57d4b818de5437c5048802096b32f66e65f9", - "sha256:c99b3908e76de5d1582e6941dc34de086eb38d18539520f4ae4ffa29b8f2644f", - 
"sha256:e3acc990b3672132a670b23cc055b967d0aa04183dbc5be82a38a0426ee6d1a6", - "sha256:eaa8f8248c20eacfade26faf749e248adc1bec1edc2d08b05916297cc76a72bd", - "sha256:ede6d9676c43844e4994b041ffca08dd157ce171190a8ccb40fed9b377db5653" + "sha256:16aa61846efddf91df623bbb4598e63be1068a6b6a2e6361cc802b41c7a286eb", + "sha256:1975b71a33ac986bb39b6d5cfbc15c7b1f218f1134efb4eb3881839d6ae69984", + "sha256:2b222744bd54781e6cc0b717fa35a54e5f176ba2ced337f27c5b435b334ef854", + "sha256:317643c0e88fad55414347216362b2e229c130edd5655fea5f8159a803098468", + "sha256:4269ce3d1b897d46fc3cc2273a0cc2a730345bb47e4456af662e6fca85c89dd7", + "sha256:65214fd668975077cdf8d408ccf2b2d6bdf73b4e6895a79f8e99ce4f0b43fcdb", + "sha256:74bc213ab8a92d86a0b304d9359d1e1d14168d4c6121b83862c9d8a88b89a738", + "sha256:88949be0db54755995dfb0210d0099a8712a3c696c860441971354c3debfc4af", + "sha256:8e1223d868be89423ec95ada5f37aa408ee64fe76ccb8e4d5f533699ba4c0e4a", + "sha256:9fa00f2d7a552a95fa6016e498fdeb6d74df537853dda79a9055c53dfc8b6e1a", + "sha256:c27fd46cab905097ba4bc28d5ba5289930f313fb1970c9d41092c9975b80e9b4", + "sha256:c94b792af431f6adb6859eb218137acd9a35f4f7442cea57e4a59c54751c36af", + "sha256:f4c12a01eb2dc16693887a874ba948b18c92f425c4d329639ece6d3bb8e631bb" ], "index": "pypi", - "version": "==3.0.0" + "version": "==3.0.2" }, "more-itertools": { "hashes": [ @@ -258,51 +278,51 @@ }, "nltk": { "hashes": [ - "sha256:fe0eda251be65843be86d7de9abfbf7161732256f742e623b21243ec47bdb718" + "sha256:286f6797204ffdb52525a1d21ec0a221ec68b8e3fa4f2d25f412ac8e63c70e8d" ], "index": "pypi", - "version": "==3.3.0" + "version": "==3.4" }, "numpy": { "hashes": [ - "sha256:1b1cf8f7300cf7b11ddb4250b3898c711a6187df05341b5b7153db23ffe5d498", - "sha256:27a0d018f608a3fe34ac5e2b876f4c23c47e38295c47dd0775cc294cd2614bc1", - "sha256:3fde172e28c899580d32dc21cb6d4a1225d62362f61050b654545c662eac215a", - "sha256:497d7c86df4f85eb03b7f58a7dd0f8b948b1f582e77629341f624ba301b4d204", - 
"sha256:4e28e66cf80c09a628ae680efeb0aa9a066eb4bb7db2a5669024c5b034891576", - "sha256:58be95faf0ca2d886b5b337e7cba2923e3ad1224b806a91223ea39f1e0c77d03", - "sha256:5b4dfb6551eaeaf532054e2c6ef4b19c449c2e3a709ebdde6392acb1372ecabc", - "sha256:63f833a7c622e9082df3cbaf03b4fd92d7e0c11e2f9d87cb57dbf0e84441964b", - "sha256:71bf3b7ca15b1967bba3a1ef6a8e87286382a8b5e46ac76b42a02fe787c5237d", - "sha256:733dc5d47e71236263837825b69c975bc08728ae638452b34aeb1d6fa347b780", - "sha256:82f00a1e2695a0e5b89879aa25ea614530b8ebdca6d49d4834843d498e8a5e92", - "sha256:866bf72b9c3bfabe4476d866c70ee1714ad3e2f7b7048bb934892335e7b6b1f7", - "sha256:8aeac8b08f4b8c52129518efcd93706bb6d506ccd17830b67d18d0227cf32d9e", - "sha256:8d2cfb0aef7ec8759736cce26946efa084cdf49797712333539ef7d135e0295e", - "sha256:981224224bbf44d95278eb37996162e8beb6f144d2719b144e86dfe2fce6c510", - "sha256:981daff58fa3985a26daa4faa2b726c4e7a1d45178100125c0e1fdaf2ac64978", - "sha256:9ad36dbfdbb0cba90a08e7343fadf86f43cf6d87450e8d2b5d71d7c7202907e4", - "sha256:a251570bb3cb04f1627f23c234ad09af0e54fc8194e026cf46178f2e5748d647", - "sha256:b5ff7dae352fd9e1edddad1348698e9fea14064460a7e39121ef9526745802e6", - "sha256:c898f9cca806102fcacb6309899743aa39efb2ad2a302f4c319f54db9f05cd84", - "sha256:cf4b970042ce148ad8dce4369c02a4078b382dadf20067ce2629c239d76460d1", - "sha256:d1569013e8cc8f37e9769d19effdd85e404c976cd0ca28a94e3ddc026c216ae8", - "sha256:dca261e85fe0d34b2c242ecb31c9ab693509af2cf955d9caf01ee3ef3669abd0", - "sha256:ec8bf53ef7c92c99340972519adbe122e82c81d5b87cbd955c74ba8a8cd2a4ad", - "sha256:f2e55726a9ee2e8129d6ce6abb466304868051bcc7a09d652b3b07cd86e801a2", - "sha256:f4dee74f2626c783a3804df9191e9008946a104d5a284e52427a53ff576423cb", - "sha256:f592fd7fe1f20b5041928cce1330937eca62f9058cb41e69c2c2d83cffc0d1e3", - "sha256:ffab5b80bba8c86251291b8ce2e6c99a61446459d4c6637f5d5cc8c9ce37c972" + "sha256:0df89ca13c25eaa1621a3f09af4c8ba20da849692dcae184cb55e80952c453fb", + "sha256:154c35f195fd3e1fad2569930ca51907057ae35e03938f89a8aedae91dd1b7c7", 
+ "sha256:18e84323cdb8de3325e741a7a8dd4a82db74fde363dce32b625324c7b32aa6d7", + "sha256:1e8956c37fc138d65ded2d96ab3949bd49038cc6e8a4494b1515b0ba88c91565", + "sha256:23557bdbca3ccbde3abaa12a6e82299bc92d2b9139011f8c16ca1bb8c75d1e95", + "sha256:24fd645a5e5d224aa6e39d93e4a722fafa9160154f296fd5ef9580191c755053", + "sha256:36e36b6868e4440760d4b9b44587ea1dc1f06532858d10abba98e851e154ca70", + "sha256:3d734559db35aa3697dadcea492a423118c5c55d176da2f3be9c98d4803fc2a7", + "sha256:416a2070acf3a2b5d586f9a6507bb97e33574df5bd7508ea970bbf4fc563fa52", + "sha256:4a22dc3f5221a644dfe4a63bf990052cc674ef12a157b1056969079985c92816", + "sha256:4d8d3e5aa6087490912c14a3c10fbdd380b40b421c13920ff468163bc50e016f", + "sha256:4f41fd159fba1245e1958a99d349df49c616b133636e0cf668f169bce2aeac2d", + "sha256:561ef098c50f91fbac2cc9305b68c915e9eb915a74d9038ecf8af274d748f76f", + "sha256:56994e14b386b5c0a9b875a76d22d707b315fa037affc7819cda08b6d0489756", + "sha256:73a1f2a529604c50c262179fcca59c87a05ff4614fe8a15c186934d84d09d9a5", + "sha256:7da99445fd890206bfcc7419f79871ba8e73d9d9e6b82fe09980bc5bb4efc35f", + "sha256:99d59e0bcadac4aa3280616591fb7bcd560e2218f5e31d5223a2e12a1425d495", + "sha256:a4cc09489843c70b22e8373ca3dfa52b3fab778b57cf81462f1203b0852e95e3", + "sha256:a61dc29cfca9831a03442a21d4b5fd77e3067beca4b5f81f1a89a04a71cf93fa", + "sha256:b1853df739b32fa913cc59ad9137caa9cc3d97ff871e2bbd89c2a2a1d4a69451", + "sha256:b1f44c335532c0581b77491b7715a871d0dd72e97487ac0f57337ccf3ab3469b", + "sha256:b261e0cb0d6faa8fd6863af26d30351fd2ffdb15b82e51e81e96b9e9e2e7ba16", + "sha256:c857ae5dba375ea26a6228f98c195fec0898a0fd91bcf0e8a0cae6d9faf3eca7", + "sha256:cf5bb4a7d53a71bb6a0144d31df784a973b36d8687d615ef6a7e9b1809917a9b", + "sha256:db9814ff0457b46f2e1d494c1efa4111ca089e08c8b983635ebffb9c1573361f", + "sha256:df04f4bad8a359daa2ff74f8108ea051670cafbca533bb2636c58b16e962989e", + "sha256:ecf81720934a0e18526177e645cbd6a8a21bb0ddc887ff9738de07a1df5c6b61", + 
"sha256:edfa6fba9157e0e3be0f40168eb142511012683ac3dc82420bee4a3f3981b30e" ], "index": "pypi", - "version": "==1.15.2" + "version": "==1.15.4" }, "openpyxl": { "hashes": [ - "sha256:22904d7bdfaaab33d65d50a0915a65eeb2f29c85d9ec53081563850678a29927" + "sha256:8b0c2a44f394a7a913a2e7cdcc1dc601d5f45c59b85a356e591e2ac5463e21e7" ], "index": "pypi", - "version": "==2.5.8" + "version": "==2.5.11" }, "parso": { "hashes": [ @@ -311,14 +331,6 @@ ], "version": "==0.3.1" }, - "pexpect": { - "hashes": [ - "sha256:2a8e88259839571d1251d278476f3eec5db26deb73a70be5ed5dc5435e418aba", - "sha256:3fbd41d4caf27fa4a377bfd16fef87271099463e6fa73e92a52f92dfee5d425b" - ], - "markers": "sys_platform != 'win32'", - "version": "==4.6.0" - }, "pickleshare": { "hashes": [ "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca", @@ -328,71 +340,69 @@ }, "pluggy": { "hashes": [ - "sha256:6e3836e39f4d36ae72840833db137f7b7d35105079aee6ec4a62d9f80d594dd1", - "sha256:95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1" + "sha256:447ba94990e8014ee25ec853339faf7b0fc8050cdc3289d4d71f7f410fb90095", + "sha256:bde19360a8ec4dfd8a20dcb811780a30998101f078fc7ded6162f0076f50508f" ], - "markers": "python_version != '3.2.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", - "version": "==0.7.1" + "version": "==0.8.0" }, "prompt-toolkit": { "hashes": [ - "sha256:5eff0c9fd652384ecfe730bbcdf3658868725c6928fbf608d9338834d7a974b6", - "sha256:81da9ecf6ca6806a549697529af8ec3ac5b739c13ac14607218e650db1b53131", - "sha256:c67c1c264d8a0d9e1070e9272bacee00f76c81daab7bc4bf09ff991bd1e224a7" + "sha256:c1d6aff5252ab2ef391c2fe498ed8c088066f66bc64a8d5c095bbf795d9fec34", + "sha256:d4c47f79b635a0e70b84fdb97ebd9a274203706b1ee5ed44c10da62755cf3ec9", + "sha256:fd17048d8335c1e6d5ee403c3569953ba3eb8555d710bfc548faf0712666ea39" ], - "version": "==2.0.5" - }, - "ptyprocess": { - "hashes": [ - 
"sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0", - "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f" - ], - "version": "==0.6.0" + "version": "==2.0.7" }, "py": { "hashes": [ - "sha256:06a30435d058473046be836d3fc4f27167fd84c45b99704f2fb5509ef61f9af1", - "sha256:50402e9d1c9005d759426988a492e0edaadb7f4e68bcddfea586bc7432d009c6" + "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", + "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" ], - "markers": "python_version != '3.2.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", - "version": "==1.6.0" + "version": "==1.7.0" }, "pydot": { "hashes": [ - "sha256:92d2e2d15531d00710f2d6fb5540d2acabc5399d464f2f20d5d21073af241eb6" + "sha256:33babe795de6ea2a26c18455e6d8a78b1dbbea6c41363a6136d9f95e9ce78425", + "sha256:dbb8c123ea6ea6cf09eca2403e597244a960b31e70c2a5c8a2a8a2e2594ce191" ], "index": "pypi", - "version": "==1.2.4" + "version": "==1.3.0" }, "pygments": { "hashes": [ - "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", - "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" + "sha256:6301ecb0997a52d2d31385e62d0a4a4cf18d2f2da7054a5ddad5c366cd39cee7", + "sha256:82666aac15622bd7bb685a4ee7f6625dd716da3ef7473620c192c0168aae64fc" ], - "version": "==2.2.0" + "version": "==2.3.0" }, "pyparsing": { "hashes": [ - "sha256:bc6c7146b91af3f567cf6daeaec360bc07d45ffec4cf5353f4d7a208ce7ca30a", - "sha256:d29593d8ebe7b57d6967b62494f8c72b03ac0262b1eed63826c6f788b3606401" + "sha256:40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b", + "sha256:f353aab21fd474459d97b709e527b5571314ee5f067441dc9f88e33eecd96592" ], - "markers": "python_version != '3.2.*' and python_version != '3.1.*' and python_version >= '2.6' and python_version != '3.0.*'", - "version": "==2.2.2" + "version": "==2.3.0" }, "pytest": { "hashes": [ - 
"sha256:7e258ee50338f4e46957f9e09a0f10fb1c2d05493fa901d113a8dafd0790de4e", - "sha256:9332147e9af2dcf46cd7ceb14d5acadb6564744ddff1fe8c17f0ce60ece7d9a2" + "sha256:1d131cc532be0023ef8ae265e2a779938d0619bb6c2510f52987ffcba7fa1ee4", + "sha256:ca4761407f1acc85ffd1609f464ca20bb71a767803505bd4127d0e45c5a50e23" ], "index": "pypi", - "version": "==3.8.2" + "version": "==4.0.1" }, "python-dateutil": { "hashes": [ - "sha256:1adb80e7a782c12e52ef9a8182bebeb73f1d7e24e374397af06fb4956c8dc5c0", - "sha256:e27001de32f627c22380a688bcc43ce83504a7bc5da472209b4c70f02829f0b8" + "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", + "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02" + ], + "version": "==2.7.5" + }, + "python-jsonrpc-server": { + "hashes": [ + "sha256:533434fa982eb42c36ddb0b6758cef8e6eaf46d014f76b70a401b8790a3e6d57" ], - "version": "==2.7.3" + "index": "pypi", + "version": "==0.0.2" }, "pyzmq": { "hashes": [ @@ -422,50 +432,48 @@ "sha256:e9767c7ab2eb552796440168d5c6e23a99ecaade08dda16266d43ad461730192", "sha256:ebf8b800d42d217e4710d1582b0c8bff20cdcb4faad7c7213e52644034300924" ], - "markers": "python_version != '3.0*' and python_version != '3.2*' and python_version >= '2.7' and python_version != '3.1*'", "version": "==17.1.2" }, "requests": { "hashes": [ - "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", - "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" + "sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54", + "sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263" ], "index": "pypi", - "version": "==2.19.1" + "version": "==2.20.1" }, "scikit-learn": { "hashes": [ - "sha256:1ca280bbdeb0f9950f9427c71e29d9f14e63b2ffa3e8fdf95f25e13773e6d898", - "sha256:33ad23aa0928c64567a24aac771aea4e179fab2a20f9f786ab00ca9fe0a13c82", - "sha256:344bc433ccbfbadcac8c16b4cec9d7c4722bcea9ce19f6da42e2c2f805571941", - 
"sha256:35ee532b5e992a6e8d8a71d325fd9e0b58716894657e7d3da3e7a1d888c2e7d4", - "sha256:37cbbba2d2a3895bba834d50488d22268a511279e053135bb291f637fe30512b", - "sha256:40cf1908ee712545f4286cc21f3ee21f3466c81438320204725ab37c96849f27", - "sha256:4130760ac54f5946523c1a1fb32a6c0925e5245f77285270a8f6fb5901b7b733", - "sha256:46cc8c32496f02affde7abe507af99cd752de0e41aec951a0bc40c693c2a1e07", - "sha256:4a364cf22be381a17c05ada9f9ce102733a0f75893c51b83718cd9358444921e", - "sha256:56aff3fa3417cd69807c1c74db69aee34ce08d7161cbdfebbff9b4023d9d224b", - "sha256:58debb34a15cfc03f4876e450068dbd711d9ec36ae5503ed2868f2c1f88522f7", - "sha256:7bcf7ade62ef3443470af32afb82646640d653f42502cf31a13cc17d3ff85d57", - "sha256:7d4eab203ed260075f47e2bf6a2bd656367e4e8683b3ad46d4651070c5d1e9aa", - "sha256:86697c6e4c2d74fbbf110c6d5979d34196a55108fa9896bf424f9795a8d935ad", - "sha256:911115db6669c9b11efd502dcc5483cd0c53e4e3c4bcdfe2e73bbb27eb5e81da", - "sha256:97d1d971f8ec257011e64b7d655df68081dd3097322690afa1a71a1d755f8c18", - "sha256:99f22c3228ec9ab3933597825dc7d595b6c8c7b9ae725cfa557f16353fac8314", - "sha256:a2e18e5a4095b3ca4852eb087d28335f3bb8515df4ccf906d380ee627613837f", - "sha256:a3070f71a4479a9827148609f24f2978f10acffa3b8012fe9606720d271066bd", - "sha256:a6a197499429d2eaa2ae922760aa3966ef353545422d5f47ea2ca9369cbf7d26", - "sha256:a7f6f5b3bc7b8e2066076098788579af12bd507ccea8ca6859e52761aa61eaca", - "sha256:a82b90b6037fcc6b311431395c11b02555a3fbf96921a0667c8f8b0c495991cb", - "sha256:ab2c4266b8cd159a266eb03c709ad5400756dca9c45aa48fb523263344475093", - "sha256:b983a2dfdb9d707c78790608bcfd63692e5c2d996865a9689f3db768d0a2978d", - "sha256:bb33d447f4c6fb164d426467d7bf8a4901c303333c5809b85319b2e0626763cd", - "sha256:bc2a0116a67081167f1fbfed731d361671e5925db291b70e65fa66170045c53f", - "sha256:bd189f6d0c2fdccb7c0d3fd1227c6626dc17d00257edbb63dd7c88f31928db61", - "sha256:d393f810da9cd4746cad7350fb89f0509c3ae702c79d2ba8bd875201be4102d1" - ], - "markers": "python_version != '3.2.*' and python_version >= '2.7' and 
python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", - "version": "==0.20.0" + "sha256:1ccbc5d9bdd933f3f75a3e8f0a60eac764388be84cee9e66bfd51d16992996f0", + "sha256:3281dd405f15f8647e617062003b229b29dd74e966bfd1c033be720897402c54", + "sha256:40c8f8f2e72dabb183820c7d77caa7b826d87d919056a02507c941f0e583c83e", + "sha256:48cef3af610f83460a4419de6c77757fc3b687da4ad39aedcbe3a92c6714b0ae", + "sha256:5d801633c69e67f215c1b796a7040ba33e2c6b3bab3d335c2caeca8a24679081", + "sha256:635978ca918da035a60e20d390273803417a1fc691290e93d9d5852a016a2bc3", + "sha256:68df1b389647f300f6f93535b0eb94efacbf5e670982c1936885e7981409dd3f", + "sha256:6aa8b434de69c2507ee2bb874642f7bbfedb0a96c234a9129b8957e109d3bd47", + "sha256:7c4569c7ca504db5cd1f4d8dfca6500bbf6f1d0e25618831f25d9f9ab8b4e33d", + "sha256:82743bf812380ed767a9c3c263a8f0d76e7af93abd2404ee199bf46e57e56f4f", + "sha256:8d15d61d2b6324d99f135cb8ef95d1475367a4ebb3b9a1fc0f5b53a16c21974a", + "sha256:95f892fa68105f6bcce0f206262d3810924f3865abd93927eb41949adb50e538", + "sha256:9ebea27859a26c97ef33b6d07dc36190890a1ca4988eac317544a61001b90e92", + "sha256:9ee7e147fcb7355ea646916f13eacb71f89c90ccbf9b067078f14c4675cc0003", + "sha256:aae03d8e640e2cd58471add3759f2002c1d726548f53a70840a7b6f330210bc2", + "sha256:b8891cd8489aa622f2789a4a48b8e99e068bfef00653ae907a8fc89ee6c54682", + "sha256:c1fd93ba6b684c3624a7f9b8e5afc6c78bc381fd2607504860c0367a7055994d", + "sha256:c2a1ce4e405ab61857c4dba89aa997522a9098e4cbc383fa95805ae6be8c772e", + "sha256:c8482ff66457a1b148973240c0eaa0456b62b8b8e5e26615f7f00ba08cc5c132", + "sha256:cb439dc1d8ee60f441594db1a12543c4e91260cd548b7ad9bc58bff3916f8dde", + "sha256:cc723aa0de67f0398821e3961ce0dcad32f276564c6fff2fc6061cc885a89c9e", + "sha256:cfb3ffdc7098b7328b120bc5819d81045c59e0392e7221441b0ac50ed7853a73", + "sha256:e045a5445e772cafe4fee5b3954947af7984a3057dcac0407ec55e8bab91bbee", + "sha256:e45a49f3bd713540b5607b77e8fc7057a289a795c5fe182333b286a4d3e0338b", + 
"sha256:e8010c4fe8f1d3625c27a89625b35906869651094b5ef5a0b22cd8514501d7d8", + "sha256:f231c2d0e397309ef18b62a8adfe914de38e335c6f8fc06a3655d5ae8b524d69", + "sha256:fa1869c18fef812e321b9ed875519daefb3a7ea016ba1392526d231a7994e81c", + "sha256:fc4d3cff012c1aa2459e5745cdd3df31a30e2ff27dcbfadf1999e16dd8497a6e" + ], + "version": "==0.20.1" }, "scipy": { "hashes": [ @@ -501,11 +509,12 @@ "index": "pypi", "version": "==1.1.0" }, - "simplegeneric": { + "singledispatch": { "hashes": [ - "sha256:dc972e06094b9af5b855b3df4a646395e43d1c9d0d39ed345b7393560d0b9173" + "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c", + "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8" ], - "version": "==0.8.1" + "version": "==3.4.0.3" }, "six": { "hashes": [ @@ -523,10 +532,10 @@ }, "smmap2": { "hashes": [ - "sha256:0dd53d991af487f9b22774fa89451358da3607c02b9b886a54736c6a313ece0b", - "sha256:dc216005e529d57007ace27048eb336dcecb7fc413cfb3b2f402bb25972b69c6" + "sha256:0555a7bf4df71d1ef4218e4807bbf9b201f910174e6e08af2e138d4e517b4dde", + "sha256:29a9ffa0497e7f2be94ca0ed1ca1aa3cd4cf25a1f6b4f5f87f74b46ed91d609a" ], - "version": "==2.0.4" + "version": "==2.0.5" }, "tornado": { "hashes": [ @@ -538,7 +547,6 @@ "sha256:d4b3e5329f572f055b587efc57d29bd051589fb5a43ec8898c77a47ec2fa2bbb", "sha256:e5f2585afccbff22390cddac29849df463b252b711aa2ce7c5f3f342a5b3b444" ], - "markers": "python_version != '3.2.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==5.1.1" }, "traitlets": { @@ -550,11 +558,10 @@ }, "urllib3": { "hashes": [ - "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", - "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" + "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", + "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" ], - "markers": "python_version != '3.2.*' and 
python_version >= '2.6' and python_version != '3.3.*' and python_version < '4' and python_version != '3.1.*' and python_version != '3.0.*'", - "version": "==1.23" + "version": "==1.24.1" }, "wcwidth": { "hashes": [ @@ -564,5 +571,14 @@ "version": "==0.1.7" } }, - "develop": {} + "develop": { + "v": { + "hashes": [ + "sha256:605a936bb41413ea696d92acc74dcf1a1eadde03e73b13655591c2b9074b9dd8", + "sha256:c2d77319519fc8b48bafd0677a9117d2717d266fa8a7512ad2158a6a40b31c01" + ], + "index": "pypi", + "version": "==4.0.0" + } + } } diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py new file mode 100644 index 00000000000..75c08f930ff --- /dev/null +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -0,0 +1,27 @@ +from persper.analytics.patch_parser import PatchParser +from persper.analytics.graph_server import GraphServer +from persper.analytics.call_commit_graph import CallCommitGraph + + +class LspClientGraphServer(GraphServer): + def __init__(self, filename_regex_strs): + pass + + def register_commit(self, hexsha, author_name, author_email, commit_message): + pass + + def update_graph(self, old_filename: str, old_src: str, + new_filename: str, new_src: str, patch: bytes): + pass + + def get_graph(self): + pass + + def reset_graph(self): + pass + + def filter_file(self, filename): + pass + + def config(self, param: dict): + pass diff --git a/persper/analytics/lsp_graph_server/callgraph/__init__.py b/persper/analytics/lsp_graph_server/callgraph/__init__.py index 679e5127224..80341353ced 100644 --- a/persper/analytics/lsp_graph_server/callgraph/__init__.py +++ b/persper/analytics/lsp_graph_server/callgraph/__init__.py @@ -8,10 +8,9 @@ import jsonpickle -from languageclient.lspcontract import (DocumentSymbol, Location, Position, - SymbolInformation, SymbolKind, - TextDocument, - TextDocumentContentChangeEvent) +from persper.analytics.lsp_graph_server.languageclient.lspcontract import \ + DocumentSymbol, Location, 
Position, SymbolInformation, SymbolKind, \ + TextDocument, TextDocumentContentChangeEvent _logger = logging.getLogger(__name__) diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 5876e55225b..1077a6f73b3 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -12,12 +12,10 @@ from jsonrpc.exceptions import JsonRpcException import wildcards -from languageclient.lspclient import LspClient -from languageclient.lspcontract import (DocumentSymbol, Location, Position, - SymbolInformation, SymbolKind, - TextDocument, - TextDocumentContentChangeEvent) - +from persper.analytics.lsp_graph_server.languageclient.lspclient import LspClient +from persper.analytics.lsp_graph_server.languageclient.lspcontract import \ + DocumentSymbol, Location, Position, SymbolInformation, SymbolKind, \ + TextDocument, TextDocumentContentChangeEvent from . import CallGraphBranch, CallGraphNode, CallGraphScope _logger = logging.getLogger(__name__) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 8ebafa99d4a..20c134d13f5 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -9,11 +9,11 @@ from jsonrpc.endpoint import Endpoint from jsonrpc.exceptions import JsonRpcException -from callgraph.builder import CallGraphBuilder -from fileparsers.CPP14Lexer import CPP14Lexer -from languageclient.lspclient import LspClient -from languageclient.lspcontract import TextDocument -from languageclient.lspserver import LspServerStub +from persper.analytics.lsp_graph_server.callgraph.builder import CallGraphBuilder +from persper.analytics.lsp_graph_server.fileparsers.CPP14Lexer import CPP14Lexer +from persper.analytics.lsp_graph_server.languageclient.lspclient import LspClient +from persper.analytics.lsp_graph_server.languageclient.lspcontract 
import TextDocument +from persper.analytics.lsp_graph_server.languageclient.lspserver import LspServerStub _logger = logging.getLogger(__name__) @@ -121,3 +121,6 @@ async def openDocument(self, textDoc: TextDocument): _logger.warning("The file seems invalid. Server error: %s", ex.message) return False raise + +class CclsGraphServer(GraphServer): + pass diff --git a/persper/analytics/lsp_graph_server/cquery.py b/persper/analytics/lsp_graph_server/cquery.py index e268a3d8e94..1e1c176c9ff 100644 --- a/persper/analytics/lsp_graph_server/cquery.py +++ b/persper/analytics/lsp_graph_server/cquery.py @@ -6,11 +6,11 @@ from jsonrpc.endpoint import Endpoint from jsonrpc.exceptions import JsonRpcException -from callgraph import CallGraphBuilder -from fileparsers.CPP14Lexer import CPP14Lexer -from languageclient.lspclient import LspClient -from languageclient.lspcontract import TextDocument -from languageclient.lspserver import LspServerStub +from persper.analytics.lsp_graph_server.callgraph.builder import CallGraphBuilder +from persper.analytics.lsp_graph_server.fileparsers.CPP14Lexer import CPP14Lexer +from persper.analytics.lsp_graph_server.languageclient.lspclient import LspClient +from persper.analytics.lsp_graph_server.languageclient.lspcontract import TextDocument +from persper.analytics.lsp_graph_server.languageclient.lspserver import LspServerStub _logger = logging.getLogger(__name__) diff --git a/persper/analytics/lsp_graph_server/languageclient/lspclient.py b/persper/analytics/lsp_graph_server/languageclient/lspclient.py index e349a318217..d9268326c34 100644 --- a/persper/analytics/lsp_graph_server/languageclient/lspclient.py +++ b/persper/analytics/lsp_graph_server/languageclient/lspclient.py @@ -8,9 +8,9 @@ from jsonrpc.endpoint import Endpoint from jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter -from languageclient.lspcontract import MessageType -from languageclient.lspserver import LspServerStub -from jsonrpcutils import 
LoggedJsonRpcStreamReader, LoggedJsonRpcStreamWriter, JsonRpcLogger +from persper.analytics.lsp_graph_server.languageclient.lspcontract import MessageType +from persper.analytics.lsp_graph_server.languageclient.lspserver import LspServerStub +from persper.analytics.lsp_graph_server.jsonrpcutils import LoggedJsonRpcStreamReader, LoggedJsonRpcStreamWriter, JsonRpcLogger _logger = logging.getLogger(__name__) diff --git a/persper/analytics/lsp_graph_server/setup.cfg b/persper/analytics/lsp_graph_server/setup.cfg new file mode 100644 index 00000000000..68859ad034c --- /dev/null +++ b/persper/analytics/lsp_graph_server/setup.cfg @@ -0,0 +1,2 @@ +[pep8] +max-line-length = 120 From dc3d0f93b3ed4e03482f35a07b4436ab81592d34 Mon Sep 17 00:00:00 2001 From: xinyan Date: Thu, 29 Nov 2018 22:20:34 +0800 Subject: [PATCH 03/84] Checkpoint. --- .../analytics/lsp_graph_server/__init__.py | 67 ++++++++++++++++--- .../lsp_graph_server/callgraph/builder.py | 17 ++++- .../lsp_graph_server/callgraph/manager.py | 25 +++++-- 3 files changed, 93 insertions(+), 16 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 75c08f930ff..6dff5308cae 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -1,27 +1,78 @@ from persper.analytics.patch_parser import PatchParser from persper.analytics.graph_server import GraphServer from persper.analytics.call_commit_graph import CallCommitGraph +from abc import abstractclassmethod, abstractproperty +from .languageclient.lspclient import LspClient +from .callgraph.manager import CallGraphManager +from .callgraph.builder import CallGraphBuilder +from pathlib import Path +from typing import Union +import asyncio class LspClientGraphServer(GraphServer): - def __init__(self, filename_regex_strs): - pass + def __init__(self, workspaceRoot: str): + self._ccgraph = CallCommitGraph() + self._workspaceRoot: Path = 
Path(workspaceRoot).resolve() + self._invalidatedFiles = set() + self._loop = asyncio.new_event_loop() + if not self._workspaceRoot.exists(): + self._workspaceRoot.touch() def register_commit(self, hexsha, author_name, author_email, commit_message): - pass + self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) - def update_graph(self, old_filename: str, old_src: str, - new_filename: str, new_src: str, patch: bytes): - pass + def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): + self._loop.run_until_complete(self.onFileChanged(old_filename, old_src, new_filename, new_src, patch)) + + async def onFileChanged(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): + oldPath = self._workspaceRoot.joinpath(old_filename).resolve() if old_filename else None + newPath = self._workspaceRoot.joinpath(new_filename).resolve() if new_filename else None + if old_filename != new_filename: + await self._callGraphBuilder.deleteFile(oldPath) + await self._callGraphBuilder.modifyFile(new_filename, new_src) + self._invalidatedFiles.add(oldPath) + self._invalidatedFiles.add(newPath) def get_graph(self): - pass + self._loop.run_until_complete(self.updateGraph()) + def reset_graph(self): - pass + self._callGraphManager.graph.clear() def filter_file(self, filename): pass def config(self, param: dict): pass + + @abstractclassmethod + async def startLspClient(self): + raise NotImplementedError() + + @abstractclassmethod + async def _stopLspClient(self): + raise NotImplementedError() + + @abstractproperty + def _lspClient(self) -> LspClient: + raise NotImplementedError() + + @abstractproperty + def _callGraphBuilder(self) -> CallGraphBuilder: + raise NotImplementedError() + + @abstractproperty + def _callGraphManager(self) -> CallGraphManager: + raise NotImplementedError() + + def invalidateFile(self, path: Union[str, Path]): + if isinstance(path, str): + path = Path(path).resolve() + 
self._invalidatedFiles.add(path) + + async def updateGraph(self): + affectedFiles = self._callGraphManager.removeByFiles(self._invalidatedFiles) + self._callGraphManager.buildGraph(fileNames=affectedFiles) + self._invalidatedFiles.clear() diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 1077a6f73b3..7f9d3d7c222 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -336,6 +336,19 @@ async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch] await self.closeDocument(textDoc.uri) _logger.info("Yielded %d branches.", counter) + async def deleteFile(self, fileName: str): + path = Path(fileName).resolve() + if not path.exists: + return False + self.removeDocumentCache(path) + doc = TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") + self._lspClient.server.textDocumentDidOpen(doc) + # Empty the file and notify language server. 
+ self._lspClient.server.textDocumentDidChange(doc.uri, 2, [TextDocumentContentChangeEvent("")]) + path.unlink() + self._lspClient.server.textDocumentDidSave(doc.uri) + await self.closeDocument(doc.uri) + async def modifyFile(self, fileName: str, newContent: str): """ Modify a file's content, notifying the language server, as if the file @@ -345,7 +358,9 @@ async def modifyFile(self, fileName: str, newContent: str): newContent = "" path = Path(fileName).resolve() try: - doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) + doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) \ + if path.exists() \ + else TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") try: self.removeDocumentCache(path) self._lspClient.server.textDocumentDidOpen(doc) diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py index 598a2b33f7a..7c91e651ff6 100644 --- a/persper/analytics/lsp_graph_server/callgraph/manager.py +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -16,6 +16,7 @@ class CallGraphManager(): Used to construct / update call graph independently of specific implementations of CallGraphBuilder. """ + def __init__(self, builder: CallGraphBuilder, callGraph: CallGraph = None): if not isinstance(builder, CallGraphBuilder): raise TypeError("builderType should be a subtype of CallGraphBuilder.") @@ -30,7 +31,7 @@ def graph(self): """ return self._graph - async def buildGraph(self, globPattern: Union[str, Iterable[str]] = None): + async def buildGraph(self, fileNames: Union[str, Iterable[str]] = None, globPattern: Union[str, Iterable[str]] = None): """ Build call graph branches from the specified files. @@ -38,7 +39,9 @@ async def buildGraph(self, globPattern: Union[str, Iterable[str]] = None): from which to build the call graph branches. 
""" counter = 0 - async for branch in self._builder.buildCallGraphInFiles(globPattern): + + def pushBranch(branch): + nonlocal counter try: self._graph.add(branch) counter += 1 @@ -46,9 +49,19 @@ async def buildGraph(self, globPattern: Union[str, Iterable[str]] = None): _logger.info("Already added %d branches.", counter) except ValueError as ex: _logger.debug("%s Branch: %s", ex, branch) + + if fileNames: + if isinstance(fileNames, str): + fileNames = [fileNames] + for fn in fileNames: + async for b in self._builder.buildCallGraphInFile(fn): + pushBranch(b) + if globPattern or not fileNames: + async for b in self._builder.buildCallGraphInFiles(globPattern): + pushBranch(b) _logger.info("Added %d branches.", counter) - def removeByFiles(self, fileNames: Iterable[str]): + def removeByFiles(self, fileNames: Iterable[str]) -> Iterable[Path]: """ Clear the graph nodes whose source or definition node contains the specified files. """ @@ -56,16 +69,14 @@ def removeByFiles(self, fileNames: Iterable[str]): affectedFiles = set((i.sourceScope.file for i in self._graph.items if i.definitionScope.file in fileNames)) affectedFiles.update(fileNames) self._graph.removeBySourceFiles(affectedFiles) + return affectedFiles async def rebuildGraph(self, fileNames: Iterable[str]): """ Rebuild the source graph for the specified files. This operation will clear and rebuild the graph nodes whose source or definition node contains the specified files. 
""" - fileNames = set((Path(f).resolve() for f in fileNames)) - affectedFiles = set((i.sourceScope.file for i in self._graph.items if i.definitionScope.file in fileNames)) - affectedFiles.update(fileNames) - self._graph.removeBySourceFiles(affectedFiles) + affectedFiles = self.removeByFiles(fileNames) #self.x += 1 #self._graph.dumpTo("dmp" + str(self.x) + ".txt") await self.buildGraph((str(p) for p in affectedFiles)) From d79416342446c7c47febfcf44947a63bf2284cb0 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 1 Dec 2018 22:14:11 +0800 Subject: [PATCH 04/84] Draft CclsGraphServer. --- .../analytics/lsp_graph_server/__init__.py | 101 ++++++++++++------ .../lsp_graph_server/callgraph/adapters.py | 20 ++++ .../lsp_graph_server/callgraph/builder.py | 21 ++-- persper/analytics/lsp_graph_server/ccls.py | 53 +++++++-- 4 files changed, 144 insertions(+), 51 deletions(-) create mode 100644 persper/analytics/lsp_graph_server/callgraph/adapters.py diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 6dff5308cae..6e86750cc1d 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -1,31 +1,50 @@ -from persper.analytics.patch_parser import PatchParser -from persper.analytics.graph_server import GraphServer -from persper.analytics.call_commit_graph import CallCommitGraph +import asyncio +import logging +import subprocess from abc import abstractclassmethod, abstractproperty -from .languageclient.lspclient import LspClient -from .callgraph.manager import CallGraphManager -from .callgraph.builder import CallGraphBuilder from pathlib import Path -from typing import Union -import asyncio +from typing import List, Union + +from persper.analytics.call_commit_graph import CallCommitGraph +from persper.analytics.graph_server import GraphServer +from persper.analytics.patch_parser import PatchParser + +from .callgraph.adapters import CallCommitGraphSynchronizer +from 
.callgraph.builder import CallGraphBuilder +from .callgraph.manager import CallGraphManager +from .languageclient.lspclient import LspClient + +_logger = logging.getLogger(__name__) class LspClientGraphServer(GraphServer): - def __init__(self, workspaceRoot: str): + + defaultLanguageServerCommand: Union[str, List[str]] = None + + def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[str]] = None): self._ccgraph = CallCommitGraph() + self._callGraph = CallCommitGraphSynchronizer(self._ccgraph) self._workspaceRoot: Path = Path(workspaceRoot).resolve() self._invalidatedFiles = set() - self._loop = asyncio.new_event_loop() if not self._workspaceRoot.exists(): self._workspaceRoot.touch() + self._languageServerCommand = \ + languageServerCommand \ + if languageServerCommand != None \ + else type(self).defaultLanguageServerCommand + self._lspServerProc: subprocess.Popen = None + self._lspClient: LspClient = None + self._callGraphBuilder: CallGraphBuilder = None + self._callGraphManager: CallGraphManager = None def register_commit(self, hexsha, author_name, author_email, commit_message): self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): - self._loop.run_until_complete(self.onFileChanged(old_filename, old_src, new_filename, new_src, patch)) + with asyncio.new_event_loop() as loop: + loop.run_until_complete(self._onFileChanged(old_filename, old_src, new_filename, new_src, patch)) - async def onFileChanged(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): + async def _onFileChanged(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): oldPath = self._workspaceRoot.joinpath(old_filename).resolve() if old_filename else None newPath = self._workspaceRoot.joinpath(new_filename).resolve() if new_filename else None if old_filename != new_filename: @@ -35,37 
+54,48 @@ async def onFileChanged(self, old_filename: str, old_src: str, new_filename: str self._invalidatedFiles.add(newPath) def get_graph(self): - self._loop.run_until_complete(self.updateGraph()) - + with asyncio.new_event_loop() as loop: + loop.run_until_complete(self.updateGraph()) + return self._ccgraph def reset_graph(self): - self._callGraphManager.graph.clear() + self._callGraph.clear() def filter_file(self, filename): - pass + return self._callGraphBuilder.filterFile(filename) def config(self, param: dict): pass - @abstractclassmethod async def startLspClient(self): - raise NotImplementedError() - - @abstractclassmethod - async def _stopLspClient(self): - raise NotImplementedError() - - @abstractproperty - def _lspClient(self) -> LspClient: - raise NotImplementedError() - - @abstractproperty - def _callGraphBuilder(self) -> CallGraphBuilder: - raise NotImplementedError() - - @abstractproperty - def _callGraphManager(self) -> CallGraphManager: - raise NotImplementedError() + """ + When overridden in derived class, starts the LSP server process, + and sets the following fields properly: + * self._lspServerProc + * self._lspClient + * self._callGraphBuilder + * self._callGraphManager + """ + self._lspServerProc = subprocess.Popen( + self._languageServerCommand, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + creationflags=subprocess.CREATE_NEW_CONSOLE) + + async def stopLspClient(self): + if not self._lspServerProc: + return + _logger.info("Shutting down language server...") + await asyncio.wait_for(self._lspClient.server.shutdown(), 10) + self._lspClient.server.exit() + exitCode = self._lspServerProc.wait(10) + if exitCode != None: + _logger.info("Language server %d exited with code: %s.", self._lspServerProc.pid, exitCode) + else: + self._lspServerProc.kill() + _logger.warning("Killed language server %d.", self._lspServerProc.pid) + self._lspServerProc = None + self._callGraphBuilder = None + self._callGraphManager = None def invalidateFile(self, path: 
Union[str, Path]): if isinstance(path, str): @@ -73,6 +103,9 @@ def invalidateFile(self, path: Union[str, Path]): self._invalidatedFiles.add(path) async def updateGraph(self): + if not self._invalidatedFiles: + return affectedFiles = self._callGraphManager.removeByFiles(self._invalidatedFiles) + _logger.info("Invalidated %d files, affected %d files.", len(self._invalidatedFiles), len(affectedFiles)) self._callGraphManager.buildGraph(fileNames=affectedFiles) self._invalidatedFiles.clear() diff --git a/persper/analytics/lsp_graph_server/callgraph/adapters.py b/persper/analytics/lsp_graph_server/callgraph/adapters.py new file mode 100644 index 00000000000..251a2aa0412 --- /dev/null +++ b/persper/analytics/lsp_graph_server/callgraph/adapters.py @@ -0,0 +1,20 @@ +from persper.analytics.call_commit_graph import CallCommitGraph + +from . import CallGraph, CallGraphBranch + + +class CallCommitGraphSynchronizer(CallGraph): + def __init__(self, callCommitGraph: CallCommitGraph): + super().__init__() + self._callCommitGraph = callCommitGraph + + def add(self, branch: CallGraphBranch): + super().add(branch) + # Use scope full name as identifier. + self._callCommitGraph.add_node(branch.sourceScope.name) + self._callCommitGraph.add_node(branch.definitionScope.name) + self._callCommitGraph.add_edge(branch.sourceScope.name, branch.definitionScope.name) + + def clear(self): + super().clear() + self._callCommitGraph.reset() diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 7f9d3d7c222..f8aa49aff51 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -143,12 +143,9 @@ class CallGraphBuilder(ABC): Building call graph branches from the given files with the specific Lexer and LspClient. 
""" - def __init__(self, lexerType: Type, lspClient: LspClient): - if not issubclass(lexerType, Lexer): - raise TypeError("lexerType should be a subtype of Lexer.") + def __init__(self, lspClient: LspClient): if not isinstance(lspClient, LspClient): raise TypeError("lspClient should be an instance of LspClient.") - self._lexerType = lexerType self._lspClient = lspClient self._tokenizedDocCache: Dict[str, TokenizedDocument] = {} self._workspaceFilePatterns: List[str] = None @@ -204,7 +201,7 @@ def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e): return doc textDoc = TextDocument.loadFile(path, self.inferLanguageId(path)) input = FileStream(path, encoding="utf-8", errors="replace") - lexer = self._lexerType(input) + lexer = self.createLexer(input) assert isinstance(lexer, Lexer) lexer.removeErrorListeners() lexer.addErrorListener(MyLexerErrorListener()) @@ -241,12 +238,21 @@ def filterToken(self, token: Token) -> bool: """ raise NotImplementedError + def filterFile(self, fileName: str): + if self._workspaceFilePatternsRegex: + return any(p.match(str(fileName)) for p in self._workspaceFilePatternsRegex) + return True + def inferLanguageId(self, path: PurePath) -> str: """ Infers the language ID for the given document path. 
""" ext = path.suffix.lower() return _KNOWN_EXTENSION_LANGUAGES[ext] + + @abstractclassmethod + def createLexer(self, fileStream: FileStream) -> Lexer: + raise NotImplementedError async def openDocument(self, textDoc: TextDocument): """ @@ -309,9 +315,8 @@ async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch] for d in defs: d: Location defPath = self.pathFromUri(d.uri) - if self._workspaceFilePatternsRegex: - if all(not p.match(str(defPath)) for p in self._workspaceFilePatternsRegex): - continue + if not self.filterFile(defPath): + continue defsDoc = await self.getTokenizedDocument(defPath) defNode = defsDoc.tokenAt(d.range.start.line, d.range.start.character) defScope = defsDoc.scopeAt(d.range.start.line, d.range.start.character) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 20c134d13f5..0c48e11616a 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -3,17 +3,21 @@ """ import logging from asyncio import sleep -from pathlib import PurePath +from pathlib import Path, PurePath +from typing import List, Union from antlr4 import Token +from antlr4.FileStream import FileStream from jsonrpc.endpoint import Endpoint from jsonrpc.exceptions import JsonRpcException -from persper.analytics.lsp_graph_server.callgraph.builder import CallGraphBuilder -from persper.analytics.lsp_graph_server.fileparsers.CPP14Lexer import CPP14Lexer -from persper.analytics.lsp_graph_server.languageclient.lspclient import LspClient -from persper.analytics.lsp_graph_server.languageclient.lspcontract import TextDocument -from persper.analytics.lsp_graph_server.languageclient.lspserver import LspServerStub +from . 
import LspClientGraphServer +from .callgraph.builder import CallGraphBuilder +from .callgraph.manager import CallGraphManager +from .fileparsers.CPP14Lexer import CPP14Lexer +from .languageclient.lspclient import LspClient +from .languageclient.lspcontract import TextDocument +from .languageclient.lspserver import LspServerStub _logger = logging.getLogger(__name__) @@ -78,11 +82,14 @@ class CclsCallGraphBuilder(CallGraphBuilder): # CPP14Lexer.MinusMinus } - def __init__(self, lspClient: LspClient): + def __init__(self, lspClient: CclsLspClient): if not isinstance(lspClient, CclsLspClient): raise TypeError("lspClient should be an instance of CclsLspClient.") super().__init__(CPP14Lexer, lspClient) + def createLexer(self, fileStream: FileStream): + return CPP14Lexer(fileStream) + def filterToken(self, token: Token): return token.type in self._tokensOfInterest @@ -122,5 +129,33 @@ async def openDocument(self, textDoc: TextDocument): return False raise -class CclsGraphServer(GraphServer): - pass + +class CclsGraphServer(LspClientGraphServer): + + defaultLanguageServerCommand = "./bin/ccls -log-file=ccls.log" + + def __init__(self, workspaceRoot: str, cacheRoot: str = None, languageServerCommand: Union[str, List[str]] = None): + super().__init__(workspaceRoot) + self._cacheRoot = Path(cacheRoot).resolve() if cacheRoot else self._workspaceRoot.joinpath(".ccls-cache") + + async def startLspClient(self): + super().startLspClient() + self._lspClient = CclsLspClient(self._lspServerProc.stdout, self._lspServerProc.stdin) + self._callGraphBuilder = CclsCallGraphBuilder(self._lspClient) + self._callGraphManager = CallGraphManager(self._callGraphBuilder, self._callGraph) + self._lspClient.start() + _logger.debug(await self._lspClient.server.initialize( + rootFolder=self._workspaceRoot, + initializationOptions={"cacheDirectory": self._cacheRoot, + "diagnostics": {"onParse": False, "onType": False}, + "discoverSystemIncludes": True, + "enableCacheRead": True, + "enableCacheWrite": 
True, + "clang": { + "excludeArgs": [], + "extraArgs": ["-nocudalib"], + "pathMappings": [], + "resourceDir": "" + } + })) + self._lspClient.server.initialized() From 4cb4902ca1a504e9c67d885fb1b445a1dfa49ac7 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 2 Dec 2018 18:20:02 +0800 Subject: [PATCH 05/84] Add cpp_test_repo. --- test/cpp_test_repo/A/Exceptions.h | 60 ++++++ test/cpp_test_repo/A/TextFileParsers.cpp | 160 +++++++++++++++ test/cpp_test_repo/A/TextFileParsers.h | 121 ++++++++++++ test/cpp_test_repo/A/TypeTraits.h | 9 + test/cpp_test_repo/A/Utility.cpp | 76 +++++++ test/cpp_test_repo/A/Utility.h | 187 ++++++++++++++++++ test/cpp_test_repo/A/main.cpp | 13 ++ test/cpp_test_repo/A/stdafx.cpp | 7 + test/cpp_test_repo/A/stdafx.h | 39 ++++ test/cpp_test_repo/B/Exceptions.h | 60 ++++++ test/cpp_test_repo/B/TextFileParsers.cpp | 160 +++++++++++++++ test/cpp_test_repo/B/TextFileParsers.h | 121 ++++++++++++ test/cpp_test_repo/B/TypeTraits.h | 9 + test/cpp_test_repo/B/Utility.cpp | 76 +++++++ test/cpp_test_repo/B/Utility.h | 187 ++++++++++++++++++ test/cpp_test_repo/B/main.cpp | 15 ++ test/cpp_test_repo/B/stdafx.cpp | 7 + test/cpp_test_repo/B/stdafx.h | 39 ++++ test/cpp_test_repo/C/CppProject1.vcxproj | 133 +++++++++++++ .../C/CppProject1.vcxproj.filters | 22 +++ test/cpp_test_repo/C/Exceptions.h | 60 ++++++ test/cpp_test_repo/C/TextFileParsers.cpp | 160 +++++++++++++++ test/cpp_test_repo/C/TextFileParsers.h | 121 ++++++++++++ test/cpp_test_repo/C/TypeTraits.h | 9 + test/cpp_test_repo/C/Utility.cpp | 76 +++++++ test/cpp_test_repo/C/Utility.h | 187 ++++++++++++++++++ test/cpp_test_repo/C/main.cpp | 13 ++ test/cpp_test_repo/C/stdafx.cpp | 7 + test/cpp_test_repo/C/stdafx.h | 39 ++++ test/cpp_test_repo/D/Exceptions.h | 60 ++++++ test/cpp_test_repo/D/TextFileParsers.cpp | 160 +++++++++++++++ test/cpp_test_repo/D/TextFileParsers.h | 121 ++++++++++++ test/cpp_test_repo/D/TypeTraits.h | 9 + test/cpp_test_repo/D/Utility.cpp | 76 +++++++ test/cpp_test_repo/D/Utility.h | 187 
++++++++++++++++++ test/cpp_test_repo/D/main.cpp | 14 ++ test/cpp_test_repo/D/stdafx.cpp | 7 + test/cpp_test_repo/D/stdafx.h | 39 ++++ test/cpp_test_repo/cg.dot | 3 + 39 files changed, 2849 insertions(+) create mode 100644 test/cpp_test_repo/A/Exceptions.h create mode 100644 test/cpp_test_repo/A/TextFileParsers.cpp create mode 100644 test/cpp_test_repo/A/TextFileParsers.h create mode 100644 test/cpp_test_repo/A/TypeTraits.h create mode 100644 test/cpp_test_repo/A/Utility.cpp create mode 100644 test/cpp_test_repo/A/Utility.h create mode 100644 test/cpp_test_repo/A/main.cpp create mode 100644 test/cpp_test_repo/A/stdafx.cpp create mode 100644 test/cpp_test_repo/A/stdafx.h create mode 100644 test/cpp_test_repo/B/Exceptions.h create mode 100644 test/cpp_test_repo/B/TextFileParsers.cpp create mode 100644 test/cpp_test_repo/B/TextFileParsers.h create mode 100644 test/cpp_test_repo/B/TypeTraits.h create mode 100644 test/cpp_test_repo/B/Utility.cpp create mode 100644 test/cpp_test_repo/B/Utility.h create mode 100644 test/cpp_test_repo/B/main.cpp create mode 100644 test/cpp_test_repo/B/stdafx.cpp create mode 100644 test/cpp_test_repo/B/stdafx.h create mode 100644 test/cpp_test_repo/C/CppProject1.vcxproj create mode 100644 test/cpp_test_repo/C/CppProject1.vcxproj.filters create mode 100644 test/cpp_test_repo/C/Exceptions.h create mode 100644 test/cpp_test_repo/C/TextFileParsers.cpp create mode 100644 test/cpp_test_repo/C/TextFileParsers.h create mode 100644 test/cpp_test_repo/C/TypeTraits.h create mode 100644 test/cpp_test_repo/C/Utility.cpp create mode 100644 test/cpp_test_repo/C/Utility.h create mode 100644 test/cpp_test_repo/C/main.cpp create mode 100644 test/cpp_test_repo/C/stdafx.cpp create mode 100644 test/cpp_test_repo/C/stdafx.h create mode 100644 test/cpp_test_repo/D/Exceptions.h create mode 100644 test/cpp_test_repo/D/TextFileParsers.cpp create mode 100644 test/cpp_test_repo/D/TextFileParsers.h create mode 100644 test/cpp_test_repo/D/TypeTraits.h create mode 100644 
test/cpp_test_repo/D/Utility.cpp create mode 100644 test/cpp_test_repo/D/Utility.h create mode 100644 test/cpp_test_repo/D/main.cpp create mode 100644 test/cpp_test_repo/D/stdafx.cpp create mode 100644 test/cpp_test_repo/D/stdafx.h create mode 100644 test/cpp_test_repo/cg.dot diff --git a/test/cpp_test_repo/A/Exceptions.h b/test/cpp_test_repo/A/Exceptions.h new file mode 100644 index 00000000000..7b73c769e97 --- /dev/null +++ b/test/cpp_test_repo/A/Exceptions.h @@ -0,0 +1,60 @@ +#pragma once + +#include +#include +#include + +// ????????????Ă»???????????? +class Exception : public std::exception +{ + std::string msg; +public: + // ??????????????????????? + Exception() : msg("?????????????") {} + // ??????????????????????????? + explicit Exception(const std::string& message) : msg(message) + { +#if _DEBUG + std::cerr << "Exception constructed: " << message << std::endl; +#endif + } + ~Exception() noexcept override { } + const char* what() const noexcept override { return msg.c_str(); } +}; + +// ????????????????????????????????????????? +class InvalidCastException : public Exception +{ +public: + InvalidCastException(const std::string& message) : Exception(message) { } +}; + +// ????????????????????????? +class ArgumentException : public Exception +{ + static std::string BuildMessage(const std::string& message, const std::string& argumentName) + { + if (argumentName.empty()) return message; + return message + " ????????" + argumentName + "??"; + } + std::string _ArgumentName; +public: + const std::string& ArgumentName() const { return _ArgumentName; } +public: + ArgumentException(const std::string& message) + : Exception(message) { } + ArgumentException(const std::string& message, const std::string& argumentName) + : Exception(BuildMessage(message, argumentName)), _ArgumentName(argumentName) { } +}; + +// ????????????????????????????????????????? 
+class OperationFailureException : public Exception +{ +private: + int _ErrorCode; +public: + explicit OperationFailureException(int errorCode) + : Exception("???????????????" + std::to_string(errorCode) + "??"), _ErrorCode(errorCode) + { + } +}; diff --git a/test/cpp_test_repo/A/TextFileParsers.cpp b/test/cpp_test_repo/A/TextFileParsers.cpp new file mode 100644 index 00000000000..7eebc791e10 --- /dev/null +++ b/test/cpp_test_repo/A/TextFileParsers.cpp @@ -0,0 +1,160 @@ +#include "stdafx.h" +#include "TextFileParsers.h" + +using namespace std; +using namespace filesystem; + +RowReader& operator>>(RowReader& reader, string& rhs) +{ + // ĘąÓĂÖƱí·ű·Ö¸ôˇŁ + getline(reader.ss, rhs, reader.delim); + // ČĄłý×óÓŇÁ˝˛ŕµÄżŐ°×ˇŁ + if (!reader.keepWhitespace) + { + // Left trim + auto wsEndsAt = find_if(rhs.begin(), rhs.end(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.begin(), wsEndsAt); + // Right trim + auto wsStartsAt = find_if(rhs.rbegin(), rhs.rend(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.rbegin().base(), rhs.end()); + } + return reader; +} + +RowReader& operator>>(RowReader& reader, int& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stoi(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, long& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stol(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, float& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stof(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, double& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stod(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, bool& rhs) +{ + string buffer{}; + if (reader >> buffer) + { + if (Equal(buffer, "true", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + rhs = true; + else if (Equal(buffer, "false", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + 
rhs = true; + else + { + try + { + auto value = stoi(buffer); + rhs = (value != 0); + } + catch (const exception&) + { + throw invalid_argument("Cannot convert to bool."); + } + } + } + return reader; +} + +void ConfigurationParser::Load(istream& inputStream) +{ + string buffer{}; + stringstream ss{}; + size_t lineNumber = 0; + while (getline(inputStream, buffer)) + { + lineNumber++; + ss.clear(); + ss.str(buffer); + string key{}; + char ch; + if (!(ss >> key)) continue; + if (key[0] == '#') continue; + if (!(ss >> ch) || ch != '=') + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşˇ°=ˇ±ˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + string value{}; + if (!(ss >> value)) + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşĹäÖĂÖµˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + // ISSUE ĿǰĹäÖĂÖµÖв»ÄÜ°üş¬żŐ¸ńŁ¬·ńÔň»áÔÚżŐ¸ń´¦˝Ř¶ĎˇŁ + entries[key] = value; + } +} + +std::string ConfigurationParser::GetString(const std::string& key, const std::string& defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + return v->second; +} + +int ConfigurationParser::GetInt(const std::string& key, int defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stoi(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞintˇŁ")); + } +} + +double ConfigurationParser::GetDouble(const std::string& key, double defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stod(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞdoubleˇŁ")); + } +} + +bool ConfigurationParser::GetBool(const std::string& key, bool defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + if (Equal(v->second, "true", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return true; + else if (Equal(v->second, 
"false", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return false; + try + { + return stod(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞboolˇŁ")); + } +} + +ConfigurationParser::ConfigurationParser(istream& inputStream) : entries() +{ + Load(inputStream); +} + +ConfigurationParser::ConfigurationParser(path filePath) : entries() +{ + auto ifs = OpenAndValidate(filePath); + Load(ifs); +} diff --git a/test/cpp_test_repo/A/TextFileParsers.h b/test/cpp_test_repo/A/TextFileParsers.h new file mode 100644 index 00000000000..73952b0deda --- /dev/null +++ b/test/cpp_test_repo/A/TextFileParsers.h @@ -0,0 +1,121 @@ +#pragma once +#include +#include "Utility.h" + +// ÓĂÓÚ´ÓĘäČëÁ÷ÖĐĚáȡһĐĐŁ¬˛˘ĘäłöˇŁ +class RowReader +{ +private: + std::stringstream ss; + char delim; + bool keepWhitespace; + std::size_t _LineNumber; +public: + size_t LineNumber() const { return _LineNumber; } + void ResetLineNumber() { _LineNumber = 0; } + const std::stringstream& LineStream() const { return ss; } +public: + operator bool() const + { + return bool(ss); + } + bool operator !() const + { + return !ss; + } + template + friend TStream& operator>>(TStream& s, RowReader& reader); + friend RowReader& operator>>(RowReader& reader, std::string& rhs); +public: + /** + * \brief + * \param delim ÁĐ·Ö¸ô·ű + */ + explicit RowReader(bool keepWhitespace = false, char delim = '\t') : ss(), delim(delim), keepWhitespace(keepWhitespace), _LineNumber(0) + { + } +}; + +// ´ÓĘäČëÁ÷ÖжÁČëŇ»ĐĐ·ÇżŐ·ÇעĘÍĐСŁ +template +TStream& operator>>(TStream& s, RowReader& reader) +{ + std::string buffer{}; + while (getline(s, buffer)) + { + reader._LineNumber++; + // Ľě˛é´ËĐĐĘÇ·ńΪעĘ͡Ł + // status + // 0 start/×ó˛ŕżŐ°× + // 1 # + // 2 ĆäËű×Ö·ű + char status = 0; + for (auto& c : buffer) + { + switch (status) + { + case 0: + if (c == '#') + { + status = 1; + goto CHECK_STATUS; + } + if (c < 0 || !isspace(c)) + { + status = 2; + goto 
CHECK_STATUS; + } + break; + default: + assert(false); + break; + } + } + CHECK_STATUS: + switch (status) + { + case 0: + // żŐ°×ĐĐ + break; + case 1: + // עĘÍĐĐ + break; + case 2: + goto SET_RESULT; + default: + assert(false); + break; + } + } +SET_RESULT: + reader.ss.str(buffer); + reader.ss.clear(); + return s; +} + +RowReader& operator>>(RowReader& reader, std::string& rhs); + +RowReader& operator>>(RowReader& reader, int& rhs); + +RowReader& operator>>(RowReader& reader, long& rhs); + +RowReader& operator>>(RowReader& reader, float& rhs); + +RowReader& operator>>(RowReader& reader, double& rhs); + +RowReader& operator>>(RowReader& reader, bool& rhs); + +class ConfigurationParser +{ +private: + std::unordered_map entries; + void Load(std::istream& inputStream); +public: + std::string GetString(const std::string& key, const std::string& defaultValue) const; + int GetInt(const std::string& key, int defaultValue) const; + double GetDouble(const std::string& key, double defaultValue) const; + bool GetBool(const std::string& key, bool defaultValue) const; +public: + ConfigurationParser(std::istream& inputStream); + ConfigurationParser(std::filesystem::path filePath); +}; \ No newline at end of file diff --git a/test/cpp_test_repo/A/TypeTraits.h b/test/cpp_test_repo/A/TypeTraits.h new file mode 100644 index 00000000000..3169ac711d7 --- /dev/null +++ b/test/cpp_test_repo/A/TypeTraits.h @@ -0,0 +1,9 @@ +#pragma once +#include + +template +struct is_flags : std::false_type +{ +}; + +template constexpr bool is_flags_v = is_flags::value; diff --git a/test/cpp_test_repo/A/Utility.cpp b/test/cpp_test_repo/A/Utility.cpp new file mode 100644 index 00000000000..e043e34a718 --- /dev/null +++ b/test/cpp_test_repo/A/Utility.cpp @@ -0,0 +1,76 @@ +#include "stdafx.h" +#include "Utility.h" + +using namespace std; + +#define _DECLARE_ENUM(TYPE, MEMBER) case TYPE::MEMBER : return #MEMBER; +#define _DECLARE_ENUM_DEFAULT(TYPE) default : return string(#TYPE) + "::" + to_string((long)v); 
+ +bool Equal(const string& lhs, const string& rhs, StringComparison comparision) +{ + if (&lhs == &rhs) return true; + size_t pos1 = 0, pos2 = 0; + size_t pos1r = lhs.size(), pos2r = rhs.size(); + if ((comparision & StringComparison::IgnoreSurroudingWhiteSpaces) + == StringComparison::IgnoreSurroudingWhiteSpaces) + { + while (pos1 < lhs.size() && isspace(lhs[pos1])) pos1++; + while (pos2 < lhs.size() && isspace(lhs[pos2])) pos2++; + while (pos1 > 0 && isspace(lhs[pos1 - 1])) pos1--; + while (pos2 > 0 && isspace(lhs[pos2 - 1])) pos2--; + } + if (pos1r - pos1 != pos2r - pos2) return false; + auto ignoreCase = (comparision & StringComparison::IgnoureCase) == StringComparison::IgnoureCase; + while (pos1 < pos1r) + { + if (ignoreCase) + { + if (tolower(lhs[pos1]) != tolower(rhs[pos1])) return false; + } else + { + if (lhs[pos1] != rhs[pos1]) return false; + } + pos1++; + pos2++; + } + return true; +} + +bool Confirm(const std::string& prompt) +{ + cout << prompt << " (Y/N)> " << flush; + while (true) + { + string buffer; + getline(cin, buffer); + stringstream ss(buffer); + if (ss >> buffer) + { + transform(buffer.begin(), buffer.end(), buffer.begin(), [](char c) {return tolower(c); }); + if (buffer == "y" || buffer == "yes") return true; + if (buffer == "n" || buffer == "no") return false; + } + cout << "ÎŢЧµÄĘäČ롣> " << flush; + } +} + +void ReportException(const exception& ex, int level) +{ + if (level > 0) + { + cerr << "<-"; + for (int i = 0; i < level; i++) cerr << '-'; + cerr << ' '; + } + cerr << "[" << typeid(ex).name() << "] " << ex.what() << endl; + try { + rethrow_if_nested(ex); + } + catch (const exception& subEx) { + ReportException(subEx, level + 1); + } + catch (...) 
+ { + cerr << "[Unknown Exception]" << endl; + } +} diff --git a/test/cpp_test_repo/A/Utility.h b/test/cpp_test_repo/A/Utility.h new file mode 100644 index 00000000000..f618f0d1804 --- /dev/null +++ b/test/cpp_test_repo/A/Utility.h @@ -0,0 +1,187 @@ +#pragma once + +#include +#include "Exceptions.h" +#include +#include "TypeTraits.h" + +#define ANSI_COLOR_RED "\x1b[31m" +#define ANSI_COLOR_GREEN "\x1b[32m" +#define ANSI_COLOR_YELLOW "\x1b[33m" +#define ANSI_COLOR_BLUE "\x1b[34m" +#define ANSI_COLOR_MAGENTA "\x1b[35m" +#define ANSI_COLOR_CYAN "\x1b[36m" + +#define ANSI_COLOR_BRIGHT "\x1b[1m" +#define ANSI_COLOR_RESET "\x1b[0m" + +namespace std { + class type_index; +} + +template +bool dynamic_kind_of(const TSrc* obj) +{ + return dynamic_cast(obj) != nullptr; +} + +template +bool pointer_kind_of(const std::shared_ptr obj) +{ + return std::dynamic_pointer_cast(obj) != nullptr; +} + +template +TDest safe_cast(TSrc obj) +{ + if (obj == nullptr) return nullptr; + auto p = dynamic_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::shared_ptr safe_pointer_cast(const std::shared_ptr& obj) +{ + if (obj == nullptr) return std::shared_ptr(); + auto p = std::dynamic_pointer_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::string StreamStatusToString(const TStream& stream) +{ + std::string status = stream.good() ? 
"good " : ""; + if (stream.eof()) status += "eof "; + if (stream.bad()) status += "bad "; + if (stream.fail()) status += "fail "; + return status; +} + +template +TStream OpenAndValidate(const TPath arg1) +{ + auto fs = TStream(arg1); + if (!fs) { + std::stringstream ss; + ss << "���Դ��ļ�" << arg1 << "ʱ��������" << StreamStatusToString(fs); + throw Exception(ss.str()); + } + return fs; +} + +// �����״̬����ȷ�ԡ��������ȷ����������쳣�� +template +void ValidateStream(const TStream& stream) +{ + if (!stream) { + std::stringstream ss; + ss << "��״̬����" << StreamStatusToString(stream); + throw Exception(ss.str()); + } +} + +// ���ڽ����� map::equal_range �Ⱥ����ķ���ֵת��Ϊ�ɱ� foreach �﷨���ܵĽṹ�� +template +class _RangeToEnumerable +{ + std::pair _Range; +public: + TIterator begin() { return _Range.first; } + TIterator end() { return _Range.second; } + bool empty() { return _Range.first == _Range.second; } + _RangeToEnumerable(const std::pair range) + : _Range(range) + { + + } +}; + +template +_RangeToEnumerable RangeToEnumerable(const std::pair range) +{ + return _RangeToEnumerable(range); +} + +inline std::string to_string(const std::pair& value) +{ + return "[" + value.first + ", " + value.second + "]"; +} + +enum class StringComparison +{ + None = 0, + IgnoreSurroudingWhiteSpaces, + IgnoureCase, +}; + +template<> +struct is_flags : std::true_type +{ + +}; + +bool Equal(const std::string& lhs, const std::string& rhs, StringComparison comparision = StringComparison::None); + +// �������÷�Χö�ٵİ�λ���� +template, int> = 0> +TEnum operator & (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) & static_cast(rhs)); +} + +template, int> = 0> +TEnum operator | (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) | static_cast(rhs)); +} + +#define _RE_TRACE(iosExpr) //std::cout << "Trace:" << iosExpr << std::endl; + +bool Confirm(const std::string& prompt); + +struct ReliabilityNetworkEntry; 
+const char* FriendlyNameOf(const std::type_index& type); +const char* FriendlyNameOf(const type_info& type); +const char* FriendlyNameOf(const ReliabilityNetworkEntry& instance); +template +const char* FriendlyNameOf() +{ + return FriendlyNameOf(typeid(T)); +} + +// ��RAII�����ڵ��û������뿪ijһ�����ʱ���Զ�ִ��ijЩ�û�����������߼��� +// �÷��� +// ����Ҫ�����߼��Ĵ������ʹ�� +// BlockExitHandler cleanupHandler(....); +// ���ɡ� +// ע�⣺ +// ��Ҫ�������ͷ������������������ֶΡ� +// ��Ҫ�������Ͷ���Ϊ������������Ϊ�ᱻ�������Ż����� +class BlockExitHandler +{ + std::function handler; +public: + explicit BlockExitHandler(const std::function& handler) : handler(handler) + { + + } + BlockExitHandler(const BlockExitHandler&) = delete; + BlockExitHandler& operator=(const BlockExitHandler&) = delete; + ~BlockExitHandler() + { + try + { + handler(); + } catch (std::exception& e) + { + // �����������������쳣�� + std::cout << "BlockExitHandler: " << e.what() << std::endl; + } + } +}; + +void ReportException(const std::exception& ex, int level = 0); diff --git a/test/cpp_test_repo/A/main.cpp b/test/cpp_test_repo/A/main.cpp new file mode 100644 index 00000000000..df9b79355fd --- /dev/null +++ b/test/cpp_test_repo/A/main.cpp @@ -0,0 +1,13 @@ +#include "stdafx.h" +#include "TextFileParsers.h" +#include "Utility.h" + +using namespace std; + +int main(int argc, char* argv[]) +{ + auto ifs = OpenAndValidate("config.txt"); + auto parser = ConfigurationParser(ifs); + cout << parser.GetBool("testBool", false) << endl; + return 0; +} diff --git a/test/cpp_test_repo/A/stdafx.cpp b/test/cpp_test_repo/A/stdafx.cpp new file mode 100644 index 00000000000..1681a386b77 --- /dev/null +++ b/test/cpp_test_repo/A/stdafx.cpp @@ -0,0 +1,7 @@ +// stdafx.cpp : Ö»°üŔ¨±ę׼°üş¬ÎÄĽţµÄÔ´ÎÄĽţ +// stdafx.obj ˝«°üş¬Ô¤±ŕŇëŔŕĐÍĐĹϢ + +#include "stdafx.h" + +// TODO: ÔÚ STDAFX.H ÖĐŇýÓĂČÎşÎËůĐčµÄ¸˝ĽÓÍ·ÎÄĽţŁ¬ +//¶ř˛»ĘÇÔÚ´ËÎÄĽţÖĐŇýÓĂ diff --git a/test/cpp_test_repo/A/stdafx.h b/test/cpp_test_repo/A/stdafx.h new file mode 100644 index 
00000000000..733498297e0 --- /dev/null +++ b/test/cpp_test_repo/A/stdafx.h @@ -0,0 +1,39 @@ +// stdafx.h : ±ę׼ϵͳ°üş¬ÎÄĽţµÄ°üş¬ÎÄĽţŁ¬ +// »ňĘÇľ­łŁĘąÓõ«˛»łŁ¸ü¸ÄµÄ +// Ěض¨ÓÚĎîÄżµÄ°üş¬ÎÄĽţ +// + +#pragma once + +// TODO: ÔÚ´Ë´¦ŇýÓĂłĚĐňĐčŇŞµÄĆäËűÍ·ÎÄĽţ +// ĘĘÓĂÓÚČí˛âĘąÓĂ +//#define _DOWNGRADED_DEMO +#define _SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// ˝«ĘµŃéĐ﵀ filesystem ĂüĂűżŐĽäµĽČë std ÖСŁŁ¨2016Ł© +namespace std +{ + namespace filesystem = experimental::filesystem::v1; +} + + +using complexd = std::complex; \ No newline at end of file diff --git a/test/cpp_test_repo/B/Exceptions.h b/test/cpp_test_repo/B/Exceptions.h new file mode 100644 index 00000000000..7b73c769e97 --- /dev/null +++ b/test/cpp_test_repo/B/Exceptions.h @@ -0,0 +1,60 @@ +#pragma once + +#include +#include +#include + +// ????????????Ă»???????????? +class Exception : public std::exception +{ + std::string msg; +public: + // ??????????????????????? + Exception() : msg("?????????????") {} + // ??????????????????????????? + explicit Exception(const std::string& message) : msg(message) + { +#if _DEBUG + std::cerr << "Exception constructed: " << message << std::endl; +#endif + } + ~Exception() noexcept override { } + const char* what() const noexcept override { return msg.c_str(); } +}; + +// ????????????????????????????????????????? +class InvalidCastException : public Exception +{ +public: + InvalidCastException(const std::string& message) : Exception(message) { } +}; + +// ????????????????????????? +class ArgumentException : public Exception +{ + static std::string BuildMessage(const std::string& message, const std::string& argumentName) + { + if (argumentName.empty()) return message; + return message + " ????????" 
+ argumentName + "??"; + } + std::string _ArgumentName; +public: + const std::string& ArgumentName() const { return _ArgumentName; } +public: + ArgumentException(const std::string& message) + : Exception(message) { } + ArgumentException(const std::string& message, const std::string& argumentName) + : Exception(BuildMessage(message, argumentName)), _ArgumentName(argumentName) { } +}; + +// ????????????????????????????????????????? +class OperationFailureException : public Exception +{ +private: + int _ErrorCode; +public: + explicit OperationFailureException(int errorCode) + : Exception("???????????????" + std::to_string(errorCode) + "??"), _ErrorCode(errorCode) + { + } +}; diff --git a/test/cpp_test_repo/B/TextFileParsers.cpp b/test/cpp_test_repo/B/TextFileParsers.cpp new file mode 100644 index 00000000000..7eebc791e10 --- /dev/null +++ b/test/cpp_test_repo/B/TextFileParsers.cpp @@ -0,0 +1,160 @@ +#include "stdafx.h" +#include "TextFileParsers.h" + +using namespace std; +using namespace filesystem; + +RowReader& operator>>(RowReader& reader, string& rhs) +{ + // ĘąÓĂÖƱí·ű·Ö¸ôˇŁ + getline(reader.ss, rhs, reader.delim); + // ČĄłý×óÓŇÁ˝˛ŕµÄżŐ°×ˇŁ + if (!reader.keepWhitespace) + { + // Left trim + auto wsEndsAt = find_if(rhs.begin(), rhs.end(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.begin(), wsEndsAt); + // Right trim + auto wsStartsAt = find_if(rhs.rbegin(), rhs.rend(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.rbegin().base(), rhs.end()); + } + return reader; +} + +RowReader& operator>>(RowReader& reader, int& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stoi(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, long& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stol(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, float& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stof(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, double& 
rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stod(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, bool& rhs) +{ + string buffer{}; + if (reader >> buffer) + { + if (Equal(buffer, "true", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + rhs = true; + else if (Equal(buffer, "false", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + rhs = true; + else + { + try + { + auto value = stoi(buffer); + rhs = (value != 0); + } + catch (const exception&) + { + throw invalid_argument("Cannot convert to bool."); + } + } + } + return reader; +} + +void ConfigurationParser::Load(istream& inputStream) +{ + string buffer{}; + stringstream ss{}; + size_t lineNumber = 0; + while (getline(inputStream, buffer)) + { + lineNumber++; + ss.clear(); + ss.str(buffer); + string key{}; + char ch; + if (!(ss >> key)) continue; + if (key[0] == '#') continue; + if (!(ss >> ch) || ch != '=') + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşˇ°=ˇ±ˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + string value{}; + if (!(ss >> value)) + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşĹäÖĂÖµˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + // ISSUE ĿǰĹäÖĂÖµÖв»ÄÜ°üş¬żŐ¸ńŁ¬·ńÔň»áÔÚżŐ¸ń´¦˝Ř¶ĎˇŁ + entries[key] = value; + } +} + +std::string ConfigurationParser::GetString(const std::string& key, const std::string& defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + return v->second; +} + +int ConfigurationParser::GetInt(const std::string& key, int defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stoi(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞintˇŁ")); + } +} + +double ConfigurationParser::GetDouble(const std::string& key, double defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stod(v->second); + 
} catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞdoubleˇŁ")); + } +} + +bool ConfigurationParser::GetBool(const std::string& key, bool defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + if (Equal(v->second, "true", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return true; + else if (Equal(v->second, "false", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return false; + try + { + return stod(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞboolˇŁ")); + } +} + +ConfigurationParser::ConfigurationParser(istream& inputStream) : entries() +{ + Load(inputStream); +} + +ConfigurationParser::ConfigurationParser(path filePath) : entries() +{ + auto ifs = OpenAndValidate(filePath); + Load(ifs); +} diff --git a/test/cpp_test_repo/B/TextFileParsers.h b/test/cpp_test_repo/B/TextFileParsers.h new file mode 100644 index 00000000000..73952b0deda --- /dev/null +++ b/test/cpp_test_repo/B/TextFileParsers.h @@ -0,0 +1,121 @@ +#pragma once +#include +#include "Utility.h" + +// ÓĂÓÚ´ÓĘäČëÁ÷ÖĐĚáȡһĐĐŁ¬˛˘ĘäłöˇŁ +class RowReader +{ +private: + std::stringstream ss; + char delim; + bool keepWhitespace; + std::size_t _LineNumber; +public: + size_t LineNumber() const { return _LineNumber; } + void ResetLineNumber() { _LineNumber = 0; } + const std::stringstream& LineStream() const { return ss; } +public: + operator bool() const + { + return bool(ss); + } + bool operator !() const + { + return !ss; + } + template + friend TStream& operator>>(TStream& s, RowReader& reader); + friend RowReader& operator>>(RowReader& reader, std::string& rhs); +public: + /** + * \brief + * \param delim ÁĐ·Ö¸ô·ű + */ + explicit RowReader(bool keepWhitespace = false, char delim = '\t') : ss(), delim(delim), keepWhitespace(keepWhitespace), _LineNumber(0) + { + } +}; + +// 
´ÓĘäČëÁ÷ÖжÁČëŇ»ĐĐ·ÇżŐ·ÇעĘÍĐСŁ +template +TStream& operator>>(TStream& s, RowReader& reader) +{ + std::string buffer{}; + while (getline(s, buffer)) + { + reader._LineNumber++; + // Ľě˛é´ËĐĐĘÇ·ńΪעĘ͡Ł + // status + // 0 start/×ó˛ŕżŐ°× + // 1 # + // 2 ĆäËű×Ö·ű + char status = 0; + for (auto& c : buffer) + { + switch (status) + { + case 0: + if (c == '#') + { + status = 1; + goto CHECK_STATUS; + } + if (c < 0 || !isspace(c)) + { + status = 2; + goto CHECK_STATUS; + } + break; + default: + assert(false); + break; + } + } + CHECK_STATUS: + switch (status) + { + case 0: + // żŐ°×ĐĐ + break; + case 1: + // עĘÍĐĐ + break; + case 2: + goto SET_RESULT; + default: + assert(false); + break; + } + } +SET_RESULT: + reader.ss.str(buffer); + reader.ss.clear(); + return s; +} + +RowReader& operator>>(RowReader& reader, std::string& rhs); + +RowReader& operator>>(RowReader& reader, int& rhs); + +RowReader& operator>>(RowReader& reader, long& rhs); + +RowReader& operator>>(RowReader& reader, float& rhs); + +RowReader& operator>>(RowReader& reader, double& rhs); + +RowReader& operator>>(RowReader& reader, bool& rhs); + +class ConfigurationParser +{ +private: + std::unordered_map entries; + void Load(std::istream& inputStream); +public: + std::string GetString(const std::string& key, const std::string& defaultValue) const; + int GetInt(const std::string& key, int defaultValue) const; + double GetDouble(const std::string& key, double defaultValue) const; + bool GetBool(const std::string& key, bool defaultValue) const; +public: + ConfigurationParser(std::istream& inputStream); + ConfigurationParser(std::filesystem::path filePath); +}; \ No newline at end of file diff --git a/test/cpp_test_repo/B/TypeTraits.h b/test/cpp_test_repo/B/TypeTraits.h new file mode 100644 index 00000000000..3169ac711d7 --- /dev/null +++ b/test/cpp_test_repo/B/TypeTraits.h @@ -0,0 +1,9 @@ +#pragma once +#include + +template +struct is_flags : std::false_type +{ +}; + +template constexpr bool is_flags_v = 
is_flags::value; diff --git a/test/cpp_test_repo/B/Utility.cpp b/test/cpp_test_repo/B/Utility.cpp new file mode 100644 index 00000000000..e043e34a718 --- /dev/null +++ b/test/cpp_test_repo/B/Utility.cpp @@ -0,0 +1,76 @@ +#include "stdafx.h" +#include "Utility.h" + +using namespace std; + +#define _DECLARE_ENUM(TYPE, MEMBER) case TYPE::MEMBER : return #MEMBER; +#define _DECLARE_ENUM_DEFAULT(TYPE) default : return string(#TYPE) + "::" + to_string((long)v); + +bool Equal(const string& lhs, const string& rhs, StringComparison comparision) +{ + if (&lhs == &rhs) return true; + size_t pos1 = 0, pos2 = 0; + size_t pos1r = lhs.size(), pos2r = rhs.size(); + if ((comparision & StringComparison::IgnoreSurroudingWhiteSpaces) + == StringComparison::IgnoreSurroudingWhiteSpaces) + { + while (pos1 < lhs.size() && isspace(lhs[pos1])) pos1++; + while (pos2 < lhs.size() && isspace(lhs[pos2])) pos2++; + while (pos1 > 0 && isspace(lhs[pos1 - 1])) pos1--; + while (pos2 > 0 && isspace(lhs[pos2 - 1])) pos2--; + } + if (pos1r - pos1 != pos2r - pos2) return false; + auto ignoreCase = (comparision & StringComparison::IgnoureCase) == StringComparison::IgnoureCase; + while (pos1 < pos1r) + { + if (ignoreCase) + { + if (tolower(lhs[pos1]) != tolower(rhs[pos1])) return false; + } else + { + if (lhs[pos1] != rhs[pos1]) return false; + } + pos1++; + pos2++; + } + return true; +} + +bool Confirm(const std::string& prompt) +{ + cout << prompt << " (Y/N)> " << flush; + while (true) + { + string buffer; + getline(cin, buffer); + stringstream ss(buffer); + if (ss >> buffer) + { + transform(buffer.begin(), buffer.end(), buffer.begin(), [](char c) {return tolower(c); }); + if (buffer == "y" || buffer == "yes") return true; + if (buffer == "n" || buffer == "no") return false; + } + cout << "ÎŢЧµÄĘäČ롣> " << flush; + } +} + +void ReportException(const exception& ex, int level) +{ + if (level > 0) + { + cerr << "<-"; + for (int i = 0; i < level; i++) cerr << '-'; + cerr << ' '; + } + cerr << "[" << 
typeid(ex).name() << "] " << ex.what() << endl; + try { + rethrow_if_nested(ex); + } + catch (const exception& subEx) { + ReportException(subEx, level + 1); + } + catch (...) + { + cerr << "[Unknown Exception]" << endl; + } +} diff --git a/test/cpp_test_repo/B/Utility.h b/test/cpp_test_repo/B/Utility.h new file mode 100644 index 00000000000..f618f0d1804 --- /dev/null +++ b/test/cpp_test_repo/B/Utility.h @@ -0,0 +1,187 @@ +#pragma once + +#include +#include "Exceptions.h" +#include +#include "TypeTraits.h" + +#define ANSI_COLOR_RED "\x1b[31m" +#define ANSI_COLOR_GREEN "\x1b[32m" +#define ANSI_COLOR_YELLOW "\x1b[33m" +#define ANSI_COLOR_BLUE "\x1b[34m" +#define ANSI_COLOR_MAGENTA "\x1b[35m" +#define ANSI_COLOR_CYAN "\x1b[36m" + +#define ANSI_COLOR_BRIGHT "\x1b[1m" +#define ANSI_COLOR_RESET "\x1b[0m" + +namespace std { + class type_index; +} + +template +bool dynamic_kind_of(const TSrc* obj) +{ + return dynamic_cast(obj) != nullptr; +} + +template +bool pointer_kind_of(const std::shared_ptr obj) +{ + return std::dynamic_pointer_cast(obj) != nullptr; +} + +template +TDest safe_cast(TSrc obj) +{ + if (obj == nullptr) return nullptr; + auto p = dynamic_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::shared_ptr safe_pointer_cast(const std::shared_ptr& obj) +{ + if (obj == nullptr) return std::shared_ptr(); + auto p = std::dynamic_pointer_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::string StreamStatusToString(const TStream& stream) +{ + std::string status = stream.good() ? 
"good " : ""; + if (stream.eof()) status += "eof "; + if (stream.bad()) status += "bad "; + if (stream.fail()) status += "fail "; + return status; +} + +template +TStream OpenAndValidate(const TPath arg1) +{ + auto fs = TStream(arg1); + if (!fs) { + std::stringstream ss; + ss << "���Դ��ļ�" << arg1 << "ʱ��������" << StreamStatusToString(fs); + throw Exception(ss.str()); + } + return fs; +} + +// �����״̬����ȷ�ԡ��������ȷ����������쳣�� +template +void ValidateStream(const TStream& stream) +{ + if (!stream) { + std::stringstream ss; + ss << "��״̬����" << StreamStatusToString(stream); + throw Exception(ss.str()); + } +} + +// ���ڽ����� map::equal_range �Ⱥ����ķ���ֵת��Ϊ�ɱ� foreach �﷨���ܵĽṹ�� +template +class _RangeToEnumerable +{ + std::pair _Range; +public: + TIterator begin() { return _Range.first; } + TIterator end() { return _Range.second; } + bool empty() { return _Range.first == _Range.second; } + _RangeToEnumerable(const std::pair range) + : _Range(range) + { + + } +}; + +template +_RangeToEnumerable RangeToEnumerable(const std::pair range) +{ + return _RangeToEnumerable(range); +} + +inline std::string to_string(const std::pair& value) +{ + return "[" + value.first + ", " + value.second + "]"; +} + +enum class StringComparison +{ + None = 0, + IgnoreSurroudingWhiteSpaces, + IgnoureCase, +}; + +template<> +struct is_flags : std::true_type +{ + +}; + +bool Equal(const std::string& lhs, const std::string& rhs, StringComparison comparision = StringComparison::None); + +// �������÷�Χö�ٵİ�λ���� +template, int> = 0> +TEnum operator & (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) & static_cast(rhs)); +} + +template, int> = 0> +TEnum operator | (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) | static_cast(rhs)); +} + +#define _RE_TRACE(iosExpr) //std::cout << "Trace:" << iosExpr << std::endl; + +bool Confirm(const std::string& prompt); + +struct ReliabilityNetworkEntry; 
+const char* FriendlyNameOf(const std::type_index& type); +const char* FriendlyNameOf(const type_info& type); +const char* FriendlyNameOf(const ReliabilityNetworkEntry& instance); +template +const char* FriendlyNameOf() +{ + return FriendlyNameOf(typeid(T)); +} + +// ��RAII�����ڵ��û������뿪ijһ�����ʱ���Զ�ִ��ijЩ�û�����������߼��� +// �÷��� +// ����Ҫ�����߼��Ĵ������ʹ�� +// BlockExitHandler cleanupHandler(....); +// ���ɡ� +// ע�⣺ +// ��Ҫ�������ͷ������������������ֶΡ� +// ��Ҫ�������Ͷ���Ϊ������������Ϊ�ᱻ�������Ż����� +class BlockExitHandler +{ + std::function handler; +public: + explicit BlockExitHandler(const std::function& handler) : handler(handler) + { + + } + BlockExitHandler(const BlockExitHandler&) = delete; + BlockExitHandler& operator=(const BlockExitHandler&) = delete; + ~BlockExitHandler() + { + try + { + handler(); + } catch (std::exception& e) + { + // �����������������쳣�� + std::cout << "BlockExitHandler: " << e.what() << std::endl; + } + } +}; + +void ReportException(const std::exception& ex, int level = 0); diff --git a/test/cpp_test_repo/B/main.cpp b/test/cpp_test_repo/B/main.cpp new file mode 100644 index 00000000000..6542b044ad0 --- /dev/null +++ b/test/cpp_test_repo/B/main.cpp @@ -0,0 +1,15 @@ +#include "stdafx.h" +#include "TextFileParsers.h" +#include "Utility.h" + +using namespace std; + +int main(int argc, char* argv[]) +{ + auto ifs = OpenAndValidate("config.txt"); + auto parser = ConfigurationParser(ifs); + cout << parser.GetBool("testBool", false) << endl; + cout << parser.GetDouble("textDouble", 1.23) << endl; + cout << parser.GetString("rawValue", "test") << endl; + return 0; +} diff --git a/test/cpp_test_repo/B/stdafx.cpp b/test/cpp_test_repo/B/stdafx.cpp new file mode 100644 index 00000000000..1681a386b77 --- /dev/null +++ b/test/cpp_test_repo/B/stdafx.cpp @@ -0,0 +1,7 @@ +// stdafx.cpp : Ö»°üŔ¨±ę׼°üş¬ÎÄĽţµÄÔ´ÎÄĽţ +// stdafx.obj ˝«°üş¬Ô¤±ŕŇëŔŕĐÍĐĹϢ + +#include "stdafx.h" + +// TODO: ÔÚ STDAFX.H ÖĐŇýÓĂČÎşÎËůĐčµÄ¸˝ĽÓÍ·ÎÄĽţŁ¬ 
+//¶ř˛»ĘÇÔÚ´ËÎÄĽţÖĐŇýÓĂ diff --git a/test/cpp_test_repo/B/stdafx.h b/test/cpp_test_repo/B/stdafx.h new file mode 100644 index 00000000000..733498297e0 --- /dev/null +++ b/test/cpp_test_repo/B/stdafx.h @@ -0,0 +1,39 @@ +// stdafx.h : ±ę׼ϵͳ°üş¬ÎÄĽţµÄ°üş¬ÎÄĽţŁ¬ +// »ňĘÇľ­łŁĘąÓõ«˛»łŁ¸ü¸ÄµÄ +// Ěض¨ÓÚĎîÄżµÄ°üş¬ÎÄĽţ +// + +#pragma once + +// TODO: ÔÚ´Ë´¦ŇýÓĂłĚĐňĐčŇŞµÄĆäËűÍ·ÎÄĽţ +// ĘĘÓĂÓÚČí˛âĘąÓĂ +//#define _DOWNGRADED_DEMO +#define _SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// ˝«ĘµŃéĐ﵀ filesystem ĂüĂűżŐĽäµĽČë std ÖСŁŁ¨2016Ł© +namespace std +{ + namespace filesystem = experimental::filesystem::v1; +} + + +using complexd = std::complex; \ No newline at end of file diff --git a/test/cpp_test_repo/C/CppProject1.vcxproj b/test/cpp_test_repo/C/CppProject1.vcxproj new file mode 100644 index 00000000000..5822b3b858e --- /dev/null +++ b/test/cpp_test_repo/C/CppProject1.vcxproj @@ -0,0 +1,133 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + + + + + + + + + + + + + + 15.0 + {1C29D994-E2FF-43C4-949C-103F4BF43E08} + CppProject1 + 10.0.17763.0 + + + + Application + true + v141 + MultiByte + + + Application + false + v141 + true + MultiByte + + + Application + true + v141 + MultiByte + + + Application + false + v141 + true + MultiByte + + + + + + + + + + + + + + + + + + + + + + + Level3 + Disabled + true + true + + + + + Level3 + Disabled + true + true + + + + + Level3 + MaxSpeed + true + true + true + true + + + true + true + + + + + Level3 + MaxSpeed + true + true + true + true + + + true + true + + + + + + \ No newline at end of file diff --git a/test/cpp_test_repo/C/CppProject1.vcxproj.filters b/test/cpp_test_repo/C/CppProject1.vcxproj.filters new file mode 100644 index 00000000000..0fa98b2811d --- /dev/null +++ 
b/test/cpp_test_repo/C/CppProject1.vcxproj.filters @@ -0,0 +1,22 @@ + + + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/cpp_test_repo/C/Exceptions.h b/test/cpp_test_repo/C/Exceptions.h new file mode 100644 index 00000000000..7b73c769e97 --- /dev/null +++ b/test/cpp_test_repo/C/Exceptions.h @@ -0,0 +1,60 @@ +#pragma once + +#include +#include +#include + +// ????????????Ă»???????????? +class Exception : public std::exception +{ + std::string msg; +public: + // ??????????????????????? + Exception() : msg("?????????????") {} + // ??????????????????????????? + explicit Exception(const std::string& message) : msg(message) + { +#if _DEBUG + std::cerr << "Exception constructed: " << message << std::endl; +#endif + } + ~Exception() noexcept override { } + const char* what() const noexcept override { return msg.c_str(); } +}; + +// ????????????????????????????????????????? +class InvalidCastException : public Exception +{ +public: + InvalidCastException(const std::string& message) : Exception(message) { } +}; + +// ????????????????????????? +class ArgumentException : public Exception +{ + static std::string BuildMessage(const std::string& message, const std::string& argumentName) + { + if (argumentName.empty()) return message; + return message + " ????????" + argumentName + "??"; + } + std::string _ArgumentName; +public: + const std::string& ArgumentName() const { return _ArgumentName; } +public: + ArgumentException(const std::string& message) + : Exception(message) { } + ArgumentException(const std::string& message, const std::string& argumentName) + : Exception(BuildMessage(message, argumentName)), _ArgumentName(argumentName) { } +}; + +// ????????????????????????????????????????? 
+class OperationFailureException : public Exception +{ +private: + int _ErrorCode; +public: + explicit OperationFailureException(int errorCode) + : Exception("???????????????" + std::to_string(errorCode) + "??"), _ErrorCode(errorCode) + { + } +}; diff --git a/test/cpp_test_repo/C/TextFileParsers.cpp b/test/cpp_test_repo/C/TextFileParsers.cpp new file mode 100644 index 00000000000..ba40fd92ad6 --- /dev/null +++ b/test/cpp_test_repo/C/TextFileParsers.cpp @@ -0,0 +1,160 @@ +#include "stdafx.h" +#include "TextFileParsers.h" + +using namespace std; +using namespace filesystem; + +RowReader& operator>>(RowReader& reader, string& rhs) +{ + // ĘąÓĂÖƱí·ű·Ö¸ôˇŁ + getline(reader.ss, rhs, reader.delim); + // ČĄłý×óÓŇÁ˝˛ŕµÄżŐ°×ˇŁ + if (!reader.keepWhitespace) + { + // Left trim + auto wsEndsAt = find_if(rhs.begin(), rhs.end(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.begin(), wsEndsAt); + // Right trim + auto wsStartsAt = find_if(rhs.rbegin(), rhs.rend(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.rbegin().base(), rhs.end()); + } + return reader; +} + +RowReader& operator>>(RowReader& reader, int& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stoi(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, long& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stol(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, float& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stof(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, double& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stod(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, bool& rhs) +{ + string buffer{}; + if (reader >> buffer) + { + if (Equal(buffer, "true", StringComparison::IgnoreCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + rhs = true; + else if (Equal(buffer, "false", StringComparison::IgnoreCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + 
rhs = true; + else + { + try + { + auto value = stoi(buffer); + rhs = (value != 0); + } + catch (const exception&) + { + throw invalid_argument("Cannot convert to bool."); + } + } + } + return reader; +} + +void ConfigurationParser::Load(istream& inputStream) +{ + string buffer{}; + stringstream ss{}; + size_t lineNumber = 0; + while (getline(inputStream, buffer)) + { + lineNumber++; + ss.clear(); + ss.str(buffer); + string key{}; + char ch; + if (!(ss >> key)) continue; + if (key[0] == '#') continue; + if (!(ss >> ch) || ch != '=') + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşˇ°=ˇ±ˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + string value{}; + if (!(ss >> value)) + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşĹäÖĂÖµˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + // ISSUE ĿǰĹäÖĂÖµÖв»ÄÜ°üş¬żŐ¸ńŁ¬·ńÔň»áÔÚżŐ¸ń´¦˝Ř¶ĎˇŁ + entries[key] = value; + } +} + +std::string ConfigurationParser::GetString(const std::string& key, const std::string& defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + return v->second; +} + +int ConfigurationParser::GetInt(const std::string& key, int defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stoi(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞintˇŁ")); + } +} + +double ConfigurationParser::GetDouble(const std::string& key, double defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stod(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞdoubleˇŁ")); + } +} + +bool ConfigurationParser::GetBool(const std::string& key, bool defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + if (Equal(v->second, "true", StringComparison::IgnoreCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return true; + else if (Equal(v->second, 
"false", StringComparison::IgnoreCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return false; + try + { + return stod(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞboolˇŁ")); + } +} + +ConfigurationParser::ConfigurationParser(istream& inputStream) : entries() +{ + Load(inputStream); +} + +ConfigurationParser::ConfigurationParser(path filePath) : entries() +{ + auto ifs = OpenAndValidate(filePath); + Load(ifs); +} diff --git a/test/cpp_test_repo/C/TextFileParsers.h b/test/cpp_test_repo/C/TextFileParsers.h new file mode 100644 index 00000000000..73952b0deda --- /dev/null +++ b/test/cpp_test_repo/C/TextFileParsers.h @@ -0,0 +1,121 @@ +#pragma once +#include +#include "Utility.h" + +// ÓĂÓÚ´ÓĘäČëÁ÷ÖĐĚáȡһĐĐŁ¬˛˘ĘäłöˇŁ +class RowReader +{ +private: + std::stringstream ss; + char delim; + bool keepWhitespace; + std::size_t _LineNumber; +public: + size_t LineNumber() const { return _LineNumber; } + void ResetLineNumber() { _LineNumber = 0; } + const std::stringstream& LineStream() const { return ss; } +public: + operator bool() const + { + return bool(ss); + } + bool operator !() const + { + return !ss; + } + template + friend TStream& operator>>(TStream& s, RowReader& reader); + friend RowReader& operator>>(RowReader& reader, std::string& rhs); +public: + /** + * \brief + * \param delim ÁĐ·Ö¸ô·ű + */ + explicit RowReader(bool keepWhitespace = false, char delim = '\t') : ss(), delim(delim), keepWhitespace(keepWhitespace), _LineNumber(0) + { + } +}; + +// ´ÓĘäČëÁ÷ÖжÁČëŇ»ĐĐ·ÇżŐ·ÇעĘÍĐСŁ +template +TStream& operator>>(TStream& s, RowReader& reader) +{ + std::string buffer{}; + while (getline(s, buffer)) + { + reader._LineNumber++; + // Ľě˛é´ËĐĐĘÇ·ńΪעĘ͡Ł + // status + // 0 start/×ó˛ŕżŐ°× + // 1 # + // 2 ĆäËű×Ö·ű + char status = 0; + for (auto& c : buffer) + { + switch (status) + { + case 0: + if (c == '#') + { + status = 1; + goto CHECK_STATUS; + } + if (c < 0 || !isspace(c)) + { + status = 2; + goto 
CHECK_STATUS; + } + break; + default: + assert(false); + break; + } + } + CHECK_STATUS: + switch (status) + { + case 0: + // żŐ°×ĐĐ + break; + case 1: + // עĘÍĐĐ + break; + case 2: + goto SET_RESULT; + default: + assert(false); + break; + } + } +SET_RESULT: + reader.ss.str(buffer); + reader.ss.clear(); + return s; +} + +RowReader& operator>>(RowReader& reader, std::string& rhs); + +RowReader& operator>>(RowReader& reader, int& rhs); + +RowReader& operator>>(RowReader& reader, long& rhs); + +RowReader& operator>>(RowReader& reader, float& rhs); + +RowReader& operator>>(RowReader& reader, double& rhs); + +RowReader& operator>>(RowReader& reader, bool& rhs); + +class ConfigurationParser +{ +private: + std::unordered_map entries; + void Load(std::istream& inputStream); +public: + std::string GetString(const std::string& key, const std::string& defaultValue) const; + int GetInt(const std::string& key, int defaultValue) const; + double GetDouble(const std::string& key, double defaultValue) const; + bool GetBool(const std::string& key, bool defaultValue) const; +public: + ConfigurationParser(std::istream& inputStream); + ConfigurationParser(std::filesystem::path filePath); +}; \ No newline at end of file diff --git a/test/cpp_test_repo/C/TypeTraits.h b/test/cpp_test_repo/C/TypeTraits.h new file mode 100644 index 00000000000..3169ac711d7 --- /dev/null +++ b/test/cpp_test_repo/C/TypeTraits.h @@ -0,0 +1,9 @@ +#pragma once +#include + +template +struct is_flags : std::false_type +{ +}; + +template constexpr bool is_flags_v = is_flags::value; diff --git a/test/cpp_test_repo/C/Utility.cpp b/test/cpp_test_repo/C/Utility.cpp new file mode 100644 index 00000000000..884b24e131e --- /dev/null +++ b/test/cpp_test_repo/C/Utility.cpp @@ -0,0 +1,76 @@ +#include "stdafx.h" +#include "Utility.h" + +using namespace std; + +#define _DECLARE_ENUM(TYPE, MEMBER) case TYPE::MEMBER : return #MEMBER; +#define _DECLARE_ENUM_DEFAULT(TYPE) default : return string(#TYPE) + "::" + to_string((long)v); 
+ +bool Equal(const string& lhs, const string& rhs, StringComparison comparision) +{ + if (&lhs == &rhs) return true; + size_t pos1 = 0, pos2 = 0; + size_t pos1r = lhs.size(), pos2r = rhs.size(); + if ((comparision & StringComparison::IgnoreSurroudingWhiteSpaces) + == StringComparison::IgnoreSurroudingWhiteSpaces) + { + while (pos1 < lhs.size() && isspace(lhs[pos1])) pos1++; + while (pos2 < lhs.size() && isspace(lhs[pos2])) pos2++; + while (pos1 > 0 && isspace(lhs[pos1 - 1])) pos1--; + while (pos2 > 0 && isspace(lhs[pos2 - 1])) pos2--; + } + if (pos1r - pos1 != pos2r - pos2) return false; + auto ignoreCase = (comparision & StringComparison::IgnoreCase) == StringComparison::IgnoreCase; + while (pos1 < pos1r) + { + if (ignoreCase) + { + if (tolower(lhs[pos1]) != tolower(rhs[pos1])) return false; + } else + { + if (lhs[pos1] != rhs[pos1]) return false; + } + pos1++; + pos2++; + } + return true; +} + +bool Confirm(const std::string& prompt) +{ + cout << prompt << " (Y/N)> " << flush; + while (true) + { + string buffer; + getline(cin, buffer); + stringstream ss(buffer); + if (ss >> buffer) + { + transform(buffer.begin(), buffer.end(), buffer.begin(), [](char c) {return tolower(c); }); + if (buffer == "y" || buffer == "yes") return true; + if (buffer == "n" || buffer == "no") return false; + } + cout << "ÎŢЧµÄĘäČ롣> " << flush; + } +} + +void ReportException(const exception& ex, int level) +{ + if (level > 0) + { + cerr << "<-"; + for (int i = 0; i < level; i++) cerr << '-'; + cerr << ' '; + } + cerr << "[" << typeid(ex).name() << "] " << ex.what() << endl; + try { + rethrow_if_nested(ex); + } + catch (const exception& subEx) { + ReportException(subEx, level + 1); + } + catch (...) 
+ { + cerr << "[Unknown Exception]" << endl; + } +} diff --git a/test/cpp_test_repo/C/Utility.h b/test/cpp_test_repo/C/Utility.h new file mode 100644 index 00000000000..dd4d698d427 --- /dev/null +++ b/test/cpp_test_repo/C/Utility.h @@ -0,0 +1,187 @@ +#pragma once + +#include +#include "Exceptions.h" +#include +#include "TypeTraits.h" + +#define ANSI_COLOR_RED "\x1b[31m" +#define ANSI_COLOR_GREEN "\x1b[32m" +#define ANSI_COLOR_YELLOW "\x1b[33m" +#define ANSI_COLOR_BLUE "\x1b[34m" +#define ANSI_COLOR_MAGENTA "\x1b[35m" +#define ANSI_COLOR_CYAN "\x1b[36m" + +#define ANSI_COLOR_BRIGHT "\x1b[1m" +#define ANSI_COLOR_RESET "\x1b[0m" + +namespace std { + class type_index; +} + +template +bool dynamic_kind_of(const TSrc* obj) +{ + return dynamic_cast(obj) != nullptr; +} + +template +bool pointer_kind_of(const std::shared_ptr obj) +{ + return std::dynamic_pointer_cast(obj) != nullptr; +} + +template +TDest safe_cast(TSrc obj) +{ + if (obj == nullptr) return nullptr; + auto p = dynamic_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::shared_ptr safe_pointer_cast(const std::shared_ptr& obj) +{ + if (obj == nullptr) return std::shared_ptr(); + auto p = std::dynamic_pointer_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::string StreamStatusToString(const TStream& stream) +{ + std::string status = stream.good() ? 
"good " : ""; + if (stream.eof()) status += "eof "; + if (stream.bad()) status += "bad "; + if (stream.fail()) status += "fail "; + return status; +} + +template +TStream OpenAndValidate(const TPath arg1) +{ + auto fs = TStream(arg1); + if (!fs) { + std::stringstream ss; + ss << "���Դ��ļ�" << arg1 << "ʱ��������" << StreamStatusToString(fs); + throw Exception(ss.str()); + } + return fs; +} + +// �����״̬����ȷ�ԡ��������ȷ����������쳣�� +template +void ValidateStream(const TStream& stream) +{ + if (!stream) { + std::stringstream ss; + ss << "��״̬����" << StreamStatusToString(stream); + throw Exception(ss.str()); + } +} + +// ���ڽ����� map::equal_range �Ⱥ����ķ���ֵת��Ϊ�ɱ� foreach �﷨���ܵĽṹ�� +template +class _RangeToEnumerable +{ + std::pair _Range; +public: + TIterator begin() { return _Range.first; } + TIterator end() { return _Range.second; } + bool empty() { return _Range.first == _Range.second; } + _RangeToEnumerable(const std::pair range) + : _Range(range) + { + + } +}; + +template +_RangeToEnumerable RangeToEnumerable(const std::pair range) +{ + return _RangeToEnumerable(range); +} + +inline std::string to_string(const std::pair& value) +{ + return "[" + value.first + ", " + value.second + "]"; +} + +enum class StringComparison +{ + None = 0, + IgnoreSurroudingWhiteSpaces, + IgnoreCase, +}; + +template<> +struct is_flags : std::true_type +{ + +}; + +bool Equal(const std::string& lhs, const std::string& rhs, StringComparison comparision = StringComparison::None); + +// �������÷�Χö�ٵİ�λ���� +template, int> = 0> +TEnum operator & (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) & static_cast(rhs)); +} + +template, int> = 0> +TEnum operator | (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) | static_cast(rhs)); +} + +#define _RE_TRACE(iosExpr) //std::cout << "Trace:" << iosExpr << std::endl; + +bool Confirm(const std::string& prompt); + +struct ReliabilityNetworkEntry; 
+const char* FriendlyNameOf(const std::type_index& type); +const char* FriendlyNameOf(const type_info& type); +const char* FriendlyNameOf(const ReliabilityNetworkEntry& instance); +template +const char* FriendlyNameOf() +{ + return FriendlyNameOf(typeid(T)); +} + +// ��RAII�����ڵ��û������뿪ijһ�����ʱ���Զ�ִ��ijЩ�û�����������߼��� +// �÷��� +// ����Ҫ�����߼��Ĵ������ʹ�� +// BlockExitHandler cleanupHandler(....); +// ���ɡ� +// ע�⣺ +// ��Ҫ�������ͷ������������������ֶΡ� +// ��Ҫ�������Ͷ���Ϊ������������Ϊ�ᱻ�������Ż����� +class BlockExitHandler +{ + std::function handler; +public: + explicit BlockExitHandler(const std::function& handler) : handler(handler) + { + + } + BlockExitHandler(const BlockExitHandler&) = delete; + BlockExitHandler& operator=(const BlockExitHandler&) = delete; + ~BlockExitHandler() + { + try + { + handler(); + } catch (std::exception& e) + { + // �����������������쳣�� + std::cout << "BlockExitHandler: " << e.what() << std::endl; + } + } +}; + +void ReportException(const std::exception& ex, int level = 0); diff --git a/test/cpp_test_repo/C/main.cpp b/test/cpp_test_repo/C/main.cpp new file mode 100644 index 00000000000..df9b79355fd --- /dev/null +++ b/test/cpp_test_repo/C/main.cpp @@ -0,0 +1,13 @@ +#include "stdafx.h" +#include "TextFileParsers.h" +#include "Utility.h" + +using namespace std; + +int main(int argc, char* argv[]) +{ + auto ifs = OpenAndValidate("config.txt"); + auto parser = ConfigurationParser(ifs); + cout << parser.GetBool("testBool", false) << endl; + return 0; +} diff --git a/test/cpp_test_repo/C/stdafx.cpp b/test/cpp_test_repo/C/stdafx.cpp new file mode 100644 index 00000000000..1681a386b77 --- /dev/null +++ b/test/cpp_test_repo/C/stdafx.cpp @@ -0,0 +1,7 @@ +// stdafx.cpp : Ö»°üŔ¨±ę׼°üş¬ÎÄĽţµÄÔ´ÎÄĽţ +// stdafx.obj ˝«°üş¬Ô¤±ŕŇëŔŕĐÍĐĹϢ + +#include "stdafx.h" + +// TODO: ÔÚ STDAFX.H ÖĐŇýÓĂČÎşÎËůĐčµÄ¸˝ĽÓÍ·ÎÄĽţŁ¬ +//¶ř˛»ĘÇÔÚ´ËÎÄĽţÖĐŇýÓĂ diff --git a/test/cpp_test_repo/C/stdafx.h b/test/cpp_test_repo/C/stdafx.h new file mode 100644 index 
00000000000..733498297e0 --- /dev/null +++ b/test/cpp_test_repo/C/stdafx.h @@ -0,0 +1,39 @@ +// stdafx.h : ±ę׼ϵͳ°üş¬ÎÄĽţµÄ°üş¬ÎÄĽţŁ¬ +// »ňĘÇľ­łŁĘąÓõ«˛»łŁ¸ü¸ÄµÄ +// Ěض¨ÓÚĎîÄżµÄ°üş¬ÎÄĽţ +// + +#pragma once + +// TODO: ÔÚ´Ë´¦ŇýÓĂłĚĐňĐčŇŞµÄĆäËűÍ·ÎÄĽţ +// ĘĘÓĂÓÚČí˛âĘąÓĂ +//#define _DOWNGRADED_DEMO +#define _SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// ˝«ĘµŃéĐ﵀ filesystem ĂüĂűżŐĽäµĽČë std ÖСŁŁ¨2016Ł© +namespace std +{ + namespace filesystem = experimental::filesystem::v1; +} + + +using complexd = std::complex; \ No newline at end of file diff --git a/test/cpp_test_repo/D/Exceptions.h b/test/cpp_test_repo/D/Exceptions.h new file mode 100644 index 00000000000..7b73c769e97 --- /dev/null +++ b/test/cpp_test_repo/D/Exceptions.h @@ -0,0 +1,60 @@ +#pragma once + +#include +#include +#include + +// ????????????Ă»???????????? +class Exception : public std::exception +{ + std::string msg; +public: + // ??????????????????????? + Exception() : msg("?????????????") {} + // ??????????????????????????? + explicit Exception(const std::string& message) : msg(message) + { +#if _DEBUG + std::cerr << "Exception constructed: " << message << std::endl; +#endif + } + ~Exception() noexcept override { } + const char* what() const noexcept override { return msg.c_str(); } +}; + +// ????????????????????????????????????????? +class InvalidCastException : public Exception +{ +public: + InvalidCastException(const std::string& message) : Exception(message) { } +}; + +// ????????????????????????? +class ArgumentException : public Exception +{ + static std::string BuildMessage(const std::string& message, const std::string& argumentName) + { + if (argumentName.empty()) return message; + return message + " ????????" 
+ argumentName + "??"; + } + std::string _ArgumentName; +public: + const std::string& ArgumentName() const { return _ArgumentName; } +public: + ArgumentException(const std::string& message) + : Exception(message) { } + ArgumentException(const std::string& message, const std::string& argumentName) + : Exception(BuildMessage(message, argumentName)), _ArgumentName(argumentName) { } +}; + +// ????????????????????????????????????????? +class OperationFailureException : public Exception +{ +private: + int _ErrorCode; +public: + explicit OperationFailureException(int errorCode) + : Exception("???????????????" + std::to_string(errorCode) + "??"), _ErrorCode(errorCode) + { + } +}; diff --git a/test/cpp_test_repo/D/TextFileParsers.cpp b/test/cpp_test_repo/D/TextFileParsers.cpp new file mode 100644 index 00000000000..7eebc791e10 --- /dev/null +++ b/test/cpp_test_repo/D/TextFileParsers.cpp @@ -0,0 +1,160 @@ +#include "stdafx.h" +#include "TextFileParsers.h" + +using namespace std; +using namespace filesystem; + +RowReader& operator>>(RowReader& reader, string& rhs) +{ + // ĘąÓĂÖƱí·ű·Ö¸ôˇŁ + getline(reader.ss, rhs, reader.delim); + // ČĄłý×óÓŇÁ˝˛ŕµÄżŐ°×ˇŁ + if (!reader.keepWhitespace) + { + // Left trim + auto wsEndsAt = find_if(rhs.begin(), rhs.end(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.begin(), wsEndsAt); + // Right trim + auto wsStartsAt = find_if(rhs.rbegin(), rhs.rend(), [](char c) {return c < 0 || !isspace(c); }); + rhs.erase(rhs.rbegin().base(), rhs.end()); + } + return reader; +} + +RowReader& operator>>(RowReader& reader, int& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stoi(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, long& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stol(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, float& rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stof(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, double& 
rhs) +{ + string buffer{}; + if (reader >> buffer) rhs = stod(buffer); + return reader; +} + +RowReader& operator>>(RowReader& reader, bool& rhs) +{ + string buffer{}; + if (reader >> buffer) + { + if (Equal(buffer, "true", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + rhs = true; + else if (Equal(buffer, "false", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + rhs = true; + else + { + try + { + auto value = stoi(buffer); + rhs = (value != 0); + } + catch (const exception&) + { + throw invalid_argument("Cannot convert to bool."); + } + } + } + return reader; +} + +void ConfigurationParser::Load(istream& inputStream) +{ + string buffer{}; + stringstream ss{}; + size_t lineNumber = 0; + while (getline(inputStream, buffer)) + { + lineNumber++; + ss.clear(); + ss.str(buffer); + string key{}; + char ch; + if (!(ss >> key)) continue; + if (key[0] == '#') continue; + if (!(ss >> ch) || ch != '=') + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşˇ°=ˇ±ˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + string value{}; + if (!(ss >> value)) + throw Exception("ÎŢЧµÄĹäÖĂĐСŁĆÚÍűŁşĹäÖĂÖµˇŁĐĐŁş" + to_string(lineNumber) + "ˇŁ"); + // ISSUE ĿǰĹäÖĂÖµÖв»ÄÜ°üş¬żŐ¸ńŁ¬·ńÔň»áÔÚżŐ¸ń´¦˝Ř¶ĎˇŁ + entries[key] = value; + } +} + +std::string ConfigurationParser::GetString(const std::string& key, const std::string& defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + return v->second; +} + +int ConfigurationParser::GetInt(const std::string& key, int defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stoi(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞintˇŁ")); + } +} + +double ConfigurationParser::GetDouble(const std::string& key, double defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + try + { + return stod(v->second); + 
} catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞdoubleˇŁ")); + } +} + +bool ConfigurationParser::GetBool(const std::string& key, bool defaultValue) const +{ + auto v = entries.find(key); + if (v == entries.end()) return defaultValue; + if (Equal(v->second, "true", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return true; + else if (Equal(v->second, "false", StringComparison::IgnoureCase | StringComparison::IgnoreSurroudingWhiteSpaces)) + return false; + try + { + return stod(v->second); + } catch (const exception&) + { + throw_with_nested(Exception("ÎŢ·¨˝«ĹäÖá°" + key + "ˇ±Öµ×Ş»»ÎŞboolˇŁ")); + } +} + +ConfigurationParser::ConfigurationParser(istream& inputStream) : entries() +{ + Load(inputStream); +} + +ConfigurationParser::ConfigurationParser(path filePath) : entries() +{ + auto ifs = OpenAndValidate(filePath); + Load(ifs); +} diff --git a/test/cpp_test_repo/D/TextFileParsers.h b/test/cpp_test_repo/D/TextFileParsers.h new file mode 100644 index 00000000000..73952b0deda --- /dev/null +++ b/test/cpp_test_repo/D/TextFileParsers.h @@ -0,0 +1,121 @@ +#pragma once +#include +#include "Utility.h" + +// ÓĂÓÚ´ÓĘäČëÁ÷ÖĐĚáȡһĐĐŁ¬˛˘ĘäłöˇŁ +class RowReader +{ +private: + std::stringstream ss; + char delim; + bool keepWhitespace; + std::size_t _LineNumber; +public: + size_t LineNumber() const { return _LineNumber; } + void ResetLineNumber() { _LineNumber = 0; } + const std::stringstream& LineStream() const { return ss; } +public: + operator bool() const + { + return bool(ss); + } + bool operator !() const + { + return !ss; + } + template + friend TStream& operator>>(TStream& s, RowReader& reader); + friend RowReader& operator>>(RowReader& reader, std::string& rhs); +public: + /** + * \brief + * \param delim ÁĐ·Ö¸ô·ű + */ + explicit RowReader(bool keepWhitespace = false, char delim = '\t') : ss(), delim(delim), keepWhitespace(keepWhitespace), _LineNumber(0) + { + } +}; + +// 
´ÓĘäČëÁ÷ÖжÁČëŇ»ĐĐ·ÇżŐ·ÇעĘÍĐСŁ +template +TStream& operator>>(TStream& s, RowReader& reader) +{ + std::string buffer{}; + while (getline(s, buffer)) + { + reader._LineNumber++; + // Ľě˛é´ËĐĐĘÇ·ńΪעĘ͡Ł + // status + // 0 start/×ó˛ŕżŐ°× + // 1 # + // 2 ĆäËű×Ö·ű + char status = 0; + for (auto& c : buffer) + { + switch (status) + { + case 0: + if (c == '#') + { + status = 1; + goto CHECK_STATUS; + } + if (c < 0 || !isspace(c)) + { + status = 2; + goto CHECK_STATUS; + } + break; + default: + assert(false); + break; + } + } + CHECK_STATUS: + switch (status) + { + case 0: + // żŐ°×ĐĐ + break; + case 1: + // עĘÍĐĐ + break; + case 2: + goto SET_RESULT; + default: + assert(false); + break; + } + } +SET_RESULT: + reader.ss.str(buffer); + reader.ss.clear(); + return s; +} + +RowReader& operator>>(RowReader& reader, std::string& rhs); + +RowReader& operator>>(RowReader& reader, int& rhs); + +RowReader& operator>>(RowReader& reader, long& rhs); + +RowReader& operator>>(RowReader& reader, float& rhs); + +RowReader& operator>>(RowReader& reader, double& rhs); + +RowReader& operator>>(RowReader& reader, bool& rhs); + +class ConfigurationParser +{ +private: + std::unordered_map entries; + void Load(std::istream& inputStream); +public: + std::string GetString(const std::string& key, const std::string& defaultValue) const; + int GetInt(const std::string& key, int defaultValue) const; + double GetDouble(const std::string& key, double defaultValue) const; + bool GetBool(const std::string& key, bool defaultValue) const; +public: + ConfigurationParser(std::istream& inputStream); + ConfigurationParser(std::filesystem::path filePath); +}; \ No newline at end of file diff --git a/test/cpp_test_repo/D/TypeTraits.h b/test/cpp_test_repo/D/TypeTraits.h new file mode 100644 index 00000000000..3169ac711d7 --- /dev/null +++ b/test/cpp_test_repo/D/TypeTraits.h @@ -0,0 +1,9 @@ +#pragma once +#include + +template +struct is_flags : std::false_type +{ +}; + +template constexpr bool is_flags_v = 
is_flags::value; diff --git a/test/cpp_test_repo/D/Utility.cpp b/test/cpp_test_repo/D/Utility.cpp new file mode 100644 index 00000000000..e043e34a718 --- /dev/null +++ b/test/cpp_test_repo/D/Utility.cpp @@ -0,0 +1,76 @@ +#include "stdafx.h" +#include "Utility.h" + +using namespace std; + +#define _DECLARE_ENUM(TYPE, MEMBER) case TYPE::MEMBER : return #MEMBER; +#define _DECLARE_ENUM_DEFAULT(TYPE) default : return string(#TYPE) + "::" + to_string((long)v); + +bool Equal(const string& lhs, const string& rhs, StringComparison comparision) +{ + if (&lhs == &rhs) return true; + size_t pos1 = 0, pos2 = 0; + size_t pos1r = lhs.size(), pos2r = rhs.size(); + if ((comparision & StringComparison::IgnoreSurroudingWhiteSpaces) + == StringComparison::IgnoreSurroudingWhiteSpaces) + { + while (pos1 < lhs.size() && isspace(lhs[pos1])) pos1++; + while (pos2 < lhs.size() && isspace(lhs[pos2])) pos2++; + while (pos1 > 0 && isspace(lhs[pos1 - 1])) pos1--; + while (pos2 > 0 && isspace(lhs[pos2 - 1])) pos2--; + } + if (pos1r - pos1 != pos2r - pos2) return false; + auto ignoreCase = (comparision & StringComparison::IgnoureCase) == StringComparison::IgnoureCase; + while (pos1 < pos1r) + { + if (ignoreCase) + { + if (tolower(lhs[pos1]) != tolower(rhs[pos1])) return false; + } else + { + if (lhs[pos1] != rhs[pos1]) return false; + } + pos1++; + pos2++; + } + return true; +} + +bool Confirm(const std::string& prompt) +{ + cout << prompt << " (Y/N)> " << flush; + while (true) + { + string buffer; + getline(cin, buffer); + stringstream ss(buffer); + if (ss >> buffer) + { + transform(buffer.begin(), buffer.end(), buffer.begin(), [](char c) {return tolower(c); }); + if (buffer == "y" || buffer == "yes") return true; + if (buffer == "n" || buffer == "no") return false; + } + cout << "ÎŢЧµÄĘäČ롣> " << flush; + } +} + +void ReportException(const exception& ex, int level) +{ + if (level > 0) + { + cerr << "<-"; + for (int i = 0; i < level; i++) cerr << '-'; + cerr << ' '; + } + cerr << "[" << 
typeid(ex).name() << "] " << ex.what() << endl; + try { + rethrow_if_nested(ex); + } + catch (const exception& subEx) { + ReportException(subEx, level + 1); + } + catch (...) + { + cerr << "[Unknown Exception]" << endl; + } +} diff --git a/test/cpp_test_repo/D/Utility.h b/test/cpp_test_repo/D/Utility.h new file mode 100644 index 00000000000..f618f0d1804 --- /dev/null +++ b/test/cpp_test_repo/D/Utility.h @@ -0,0 +1,187 @@ +#pragma once + +#include +#include "Exceptions.h" +#include +#include "TypeTraits.h" + +#define ANSI_COLOR_RED "\x1b[31m" +#define ANSI_COLOR_GREEN "\x1b[32m" +#define ANSI_COLOR_YELLOW "\x1b[33m" +#define ANSI_COLOR_BLUE "\x1b[34m" +#define ANSI_COLOR_MAGENTA "\x1b[35m" +#define ANSI_COLOR_CYAN "\x1b[36m" + +#define ANSI_COLOR_BRIGHT "\x1b[1m" +#define ANSI_COLOR_RESET "\x1b[0m" + +namespace std { + class type_index; +} + +template +bool dynamic_kind_of(const TSrc* obj) +{ + return dynamic_cast(obj) != nullptr; +} + +template +bool pointer_kind_of(const std::shared_ptr obj) +{ + return std::dynamic_pointer_cast(obj) != nullptr; +} + +template +TDest safe_cast(TSrc obj) +{ + if (obj == nullptr) return nullptr; + auto p = dynamic_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::shared_ptr safe_pointer_cast(const std::shared_ptr& obj) +{ + if (obj == nullptr) return std::shared_ptr(); + auto p = std::dynamic_pointer_cast(obj); + if (p == nullptr) throw InvalidCastException("ָ��������ʱָ������ת������Ч�ġ�"); + return p; +} + +template +std::string StreamStatusToString(const TStream& stream) +{ + std::string status = stream.good() ? 
"good " : ""; + if (stream.eof()) status += "eof "; + if (stream.bad()) status += "bad "; + if (stream.fail()) status += "fail "; + return status; +} + +template +TStream OpenAndValidate(const TPath arg1) +{ + auto fs = TStream(arg1); + if (!fs) { + std::stringstream ss; + ss << "���Դ��ļ�" << arg1 << "ʱ��������" << StreamStatusToString(fs); + throw Exception(ss.str()); + } + return fs; +} + +// �����״̬����ȷ�ԡ��������ȷ����������쳣�� +template +void ValidateStream(const TStream& stream) +{ + if (!stream) { + std::stringstream ss; + ss << "��״̬����" << StreamStatusToString(stream); + throw Exception(ss.str()); + } +} + +// ���ڽ����� map::equal_range �Ⱥ����ķ���ֵת��Ϊ�ɱ� foreach �﷨���ܵĽṹ�� +template +class _RangeToEnumerable +{ + std::pair _Range; +public: + TIterator begin() { return _Range.first; } + TIterator end() { return _Range.second; } + bool empty() { return _Range.first == _Range.second; } + _RangeToEnumerable(const std::pair range) + : _Range(range) + { + + } +}; + +template +_RangeToEnumerable RangeToEnumerable(const std::pair range) +{ + return _RangeToEnumerable(range); +} + +inline std::string to_string(const std::pair& value) +{ + return "[" + value.first + ", " + value.second + "]"; +} + +enum class StringComparison +{ + None = 0, + IgnoreSurroudingWhiteSpaces, + IgnoureCase, +}; + +template<> +struct is_flags : std::true_type +{ + +}; + +bool Equal(const std::string& lhs, const std::string& rhs, StringComparison comparision = StringComparison::None); + +// �������÷�Χö�ٵİ�λ���� +template, int> = 0> +TEnum operator & (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) & static_cast(rhs)); +} + +template, int> = 0> +TEnum operator | (TEnum lhs, TEnum rhs) +{ + using T = std::underlying_type_t; + return static_cast(static_cast(lhs) | static_cast(rhs)); +} + +#define _RE_TRACE(iosExpr) //std::cout << "Trace:" << iosExpr << std::endl; + +bool Confirm(const std::string& prompt); + +struct ReliabilityNetworkEntry; 
+const char* FriendlyNameOf(const std::type_index& type); +const char* FriendlyNameOf(const type_info& type); +const char* FriendlyNameOf(const ReliabilityNetworkEntry& instance); +template +const char* FriendlyNameOf() +{ + return FriendlyNameOf(typeid(T)); +} + +// ��RAII�����ڵ��û������뿪ijһ�����ʱ���Զ�ִ��ijЩ�û�����������߼��� +// �÷��� +// ����Ҫ�����߼��Ĵ������ʹ�� +// BlockExitHandler cleanupHandler(....); +// ���ɡ� +// ע�⣺ +// ��Ҫ�������ͷ������������������ֶΡ� +// ��Ҫ�������Ͷ���Ϊ������������Ϊ�ᱻ�������Ż����� +class BlockExitHandler +{ + std::function handler; +public: + explicit BlockExitHandler(const std::function& handler) : handler(handler) + { + + } + BlockExitHandler(const BlockExitHandler&) = delete; + BlockExitHandler& operator=(const BlockExitHandler&) = delete; + ~BlockExitHandler() + { + try + { + handler(); + } catch (std::exception& e) + { + // �����������������쳣�� + std::cout << "BlockExitHandler: " << e.what() << std::endl; + } + } +}; + +void ReportException(const std::exception& ex, int level = 0); diff --git a/test/cpp_test_repo/D/main.cpp b/test/cpp_test_repo/D/main.cpp new file mode 100644 index 00000000000..f7da13a1987 --- /dev/null +++ b/test/cpp_test_repo/D/main.cpp @@ -0,0 +1,14 @@ +#include "stdafx.h" +#include "TextFileParsers.h" +#include "Utility.h" + +using namespace std; + +int main(int argc, char* argv[]) +{ + auto ifs = ifstream("config.txt"); + string line{}; + getline(ifs, line); + cout << line << endl; + return 0; +} diff --git a/test/cpp_test_repo/D/stdafx.cpp b/test/cpp_test_repo/D/stdafx.cpp new file mode 100644 index 00000000000..1681a386b77 --- /dev/null +++ b/test/cpp_test_repo/D/stdafx.cpp @@ -0,0 +1,7 @@ +// stdafx.cpp : Ö»°üŔ¨±ę׼°üş¬ÎÄĽţµÄÔ´ÎÄĽţ +// stdafx.obj ˝«°üş¬Ô¤±ŕŇëŔŕĐÍĐĹϢ + +#include "stdafx.h" + +// TODO: ÔÚ STDAFX.H ÖĐŇýÓĂČÎşÎËůĐčµÄ¸˝ĽÓÍ·ÎÄĽţŁ¬ +//¶ř˛»ĘÇÔÚ´ËÎÄĽţÖĐŇýÓĂ diff --git a/test/cpp_test_repo/D/stdafx.h b/test/cpp_test_repo/D/stdafx.h new file mode 100644 index 00000000000..733498297e0 --- /dev/null +++ 
b/test/cpp_test_repo/D/stdafx.h @@ -0,0 +1,39 @@ +// stdafx.h : ±ę׼ϵͳ°üş¬ÎÄĽţµÄ°üş¬ÎÄĽţŁ¬ +// »ňĘÇľ­łŁĘąÓõ«˛»łŁ¸ü¸ÄµÄ +// Ěض¨ÓÚĎîÄżµÄ°üş¬ÎÄĽţ +// + +#pragma once + +// TODO: ÔÚ´Ë´¦ŇýÓĂłĚĐňĐčŇŞµÄĆäËűÍ·ÎÄĽţ +// ĘĘÓĂÓÚČí˛âĘąÓĂ +//#define _DOWNGRADED_DEMO +#define _SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// ˝«ĘµŃéĐ﵀ filesystem ĂüĂűżŐĽäµĽČë std ÖСŁŁ¨2016Ł© +namespace std +{ + namespace filesystem = experimental::filesystem::v1; +} + + +using complexd = std::complex; \ No newline at end of file diff --git a/test/cpp_test_repo/cg.dot b/test/cpp_test_repo/cg.dot new file mode 100644 index 00000000000..42efa7add5e --- /dev/null +++ b/test/cpp_test_repo/cg.dot @@ -0,0 +1,3 @@ +digraph cpp_test_branch { + A -> B -> C -> D; +} From efb346ce92f1284ef1389170b3730dd48945b002 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 2 Dec 2018 22:58:18 +0800 Subject: [PATCH 06/84] Get CclsGraphServer working. Note: Ccls has concurrency issue. Single-threading is used for now. Ccls uses full function name rather than signature as identifier. Some function parameter, variables might be treated as stanalone function. 
--- notebooks/lsp-ccls.ipynb | 247 ++++++++++++++++++ persper/analytics/analyzer.py | 28 +- persper/analytics/graph_server.py | 3 + .../analytics/lsp_graph_server/__init__.py | 38 ++- .../lsp_graph_server/callgraph/builder.py | 22 +- .../lsp_graph_server/callgraph/manager.py | 16 +- persper/analytics/lsp_graph_server/ccls.py | 27 +- .../lsp_graph_server/jsonrpcutils.py | 6 +- 8 files changed, 337 insertions(+), 50 deletions(-) create mode 100644 notebooks/lsp-ccls.ipynb diff --git a/notebooks/lsp-ccls.ipynb b/notebooks/lsp-ccls.ipynb new file mode 100644 index 00000000000..53b99e41894 --- /dev/null +++ b/notebooks/lsp-ccls.ipynb @@ -0,0 +1,247 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "\n", + "logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s',\n", + " level=logging.INFO)\n", + "logging.getLogger('asyncio').setLevel(logging.CRITICAL)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-02 22:54:11,892 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Exceptions.h.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- Overview ------\n", + "# of commits on master: 4\n", + "# of commits on branch: 0\n", + "----- No.1 e04529b7290fb09339c51df33b7b98123e5b6b77 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-02 22:54:11,958 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp.\n", + "2018-12-02 22:54:12,022 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h.\n", + "2018-12-02 22:54:12,090 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TypeTraits.h.\n", + "2018-12-02 22:54:12,153 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp.\n", + "2018-12-02 22:54:12,223 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h.\n", + "2018-12-02 22:54:12,286 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", + "2018-12-02 22:54:12,362 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.cpp.\n", + "2018-12-02 22:54:12,425 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.h.\n", + "2018-12-02 22:54:12,429 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 0 branches by 9 files.\n", + "2018-12-02 22:54:12,430 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", + "2018-12-02 22:54:12,431 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h\n", + "2018-12-02 22:54:16,084 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 44 branches.\n", + "2018-12-02 22:54:16,086 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.cpp\n", + "2018-12-02 22:54:16,196 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2018-12-02 22:54:16,197 INFO [persper.analytics.lsp_graph_server.callgraph.builder] 
Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h\n", + "2018-12-02 22:54:17,486 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 79 branches.\n", + "2018-12-02 22:54:17,487 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n", + "2018-12-02 22:54:17,623 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", + "2018-12-02 22:54:17,624 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp\n", + "2018-12-02 22:54:18,126 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 102 branches.\n", + "2018-12-02 22:54:18,127 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.h\n", + "2018-12-02 22:54:18,360 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2018-12-02 22:54:18,361 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp\n", + "2018-12-02 22:54:18,573 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2018-12-02 22:54:18,574 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TypeTraits.h\n", + "2018-12-02 22:54:18,696 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 2 branches.\n", + "2018-12-02 22:54:18,698 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Exceptions.h\n", + "2018-12-02 22:54:18,876 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 16 branches.\n", + "2018-12-02 22:54:18,877 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 256 branches.\n", + "2018-12-02 22:54:18,877 INFO [persper.analytics.lsp_graph_server] End commit: e04529b7290fb09339c51df33b7b98123e5b6b77\n", + "2018-12-02 22:54:19,049 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", + "2018-12-02 22:54:19,050 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 6 branches by 1 files.\n", + "2018-12-02 22:54:19,051 INFO [persper.analytics.lsp_graph_server] Invalidated 1 files, affected 1 files.\n", + "2018-12-02 22:54:19,052 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.2 ff47723c41d94eb1d90fa005986cafdc0b5ba7b3 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-02 22:54:20,010 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 10 branches.\n", + "2018-12-02 22:54:20,011 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 10 branches.\n", + "2018-12-02 22:54:20,012 INFO [persper.analytics.lsp_graph_server] End commit: ff47723c41d94eb1d90fa005986cafdc0b5ba7b3\n", + "2018-12-02 22:54:20,199 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp." 
+ ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.3 4ff40e1456a4de88d802c410e755f34ad31ff303 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "2018-12-02 22:54:20,315 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp.\n", + "2018-12-02 22:54:20,436 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h.\n", + "2018-12-02 22:54:20,565 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", + "2018-12-02 22:54:20,567 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 260 branches by 5 files.\n", + "2018-12-02 22:54:20,568 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 5 files.\n", + "2018-12-02 22:54:20,570 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h\n", + "2018-12-02 22:54:23,189 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 44 branches.\n", + "2018-12-02 22:54:23,190 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h\n", + "2018-12-02 22:54:24,340 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 85 branches.\n", + "2018-12-02 22:54:24,342 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n", + "2018-12-02 22:54:24,474 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", + "2018-12-02 22:54:24,475 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp\n", + "2018-12-02 
22:54:24,987 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 161 branches.\n", + "2018-12-02 22:54:24,987 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp\n", + "2018-12-02 22:54:25,207 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2018-12-02 22:54:25,208 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 327 branches.\n", + "2018-12-02 22:54:25,209 INFO [persper.analytics.lsp_graph_server] End commit: 4ff40e1456a4de88d802c410e755f34ad31ff303\n", + "2018-12-02 22:54:25,391 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.4 62d85a54623c6ddafdef5b6bef0e24f53d49aa65 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-02 22:54:25,508 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp.\n", + "2018-12-02 22:54:25,627 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h.\n", + "2018-12-02 22:54:25,747 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", + "2018-12-02 22:54:25,750 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 327 branches by 5 files.\n", + "2018-12-02 22:54:25,750 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 5 files.\n", + "2018-12-02 22:54:25,752 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h\n", + "2018-12-02 22:54:28,442 INFO [persper.analytics.lsp_graph_server.callgraph.builder] 
Yielded 44 branches.\n", + "2018-12-02 22:54:28,444 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h\n", + "2018-12-02 22:54:29,587 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 85 branches.\n", + "2018-12-02 22:54:29,589 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp\n", + "2018-12-02 22:54:30,116 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 161 branches.\n", + "2018-12-02 22:54:30,117 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n", + "2018-12-02 22:54:30,251 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 3 branches.\n", + "2018-12-02 22:54:30,253 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp\n", + "2018-12-02 22:54:30,480 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2018-12-02 22:54:30,481 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 324 branches.\n", + "2018-12-02 22:54:30,481 INFO [persper.analytics.lsp_graph_server] End commit: 62d85a54623c6ddafdef5b6bef0e24f53d49aa65\n", + "2018-12-02 22:54:30,482 INFO [persper.analytics.lsp_graph_server] Shutting down language server...\n", + "2018-12-02 22:54:30,563 INFO [persper.analytics.lsp_graph_server] Language server 21420 exited with code: 0.\n" + ] + } + ], + "source": [ + "from persper.analytics.analyzer import Analyzer\n", + "from persper.analytics.lsp_graph_server.ccls import CclsGraphServer\n", + "from tempfile import mkdtemp\n", + "\n", + "workspaceRoot = mkdtemp()\n", + "print(\"Workspace root: \", workspaceRoot)\n", + "G = None\n", + "async with CclsGraphServer(workspaceRoot, 
cacheRoot=\"./.ccls-cache\", languageServerCommand=\"../bin/ccls -log-file=ccls.log\") as graphServer:\n", + " analyzer = Analyzer(\"../repos\", graphServer)\n", + " graphServer.reset_graph()\n", + " await analyzer.analyze()\n", + " G = await analyzer.get_graph()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['RowReader', 'TStream', 'operator>>', 'ConfigurationParser', 'ConfigurationParser::Load', 'ConfigurationParser::GetString', 'ConfigurationParser::GetInt', 'ConfigurationParser::GetDouble', 'ConfigurationParser::GetBool', 'ConfigurationParser::ConfigurationParser', 'dynamic_kind_of', 'TSrc', 'pointer_kind_of', 'safe_cast', 'safe_pointer_cast', 'StreamStatusToString', 'OpenAndValidate', 'TPath', 'ValidateStream', '_RangeToEnumerable', 'RangeToEnumerable', 'to_string', 'StringComparison', 'is_flags', 'Equal', 'operator&', 'TEnum', 'lhs', 'rhs', 'operator|', 'Confirm', 'FriendlyNameOf', 'BlockExitHandler', 'e', 'ReportException', 'main', 'Exception', 'c', 'subEx', 'stream', 'status', 'arg1', 'fs']\n" + ] + } + ], + "source": [ + "print(G.nodes())" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('RowReader', 'RowReader'), ('RowReader', 'TStream'), ('operator>>', 'TStream'), ('operator>>', 'RowReader'), ('operator>>', 'operator>>'), ('operator>>', 'Equal'), ('operator>>', 'StringComparison'), ('operator>>', 'c'), ('ConfigurationParser', 'ConfigurationParser::Load'), ('ConfigurationParser', 'ConfigurationParser::GetString'), ('ConfigurationParser', 'ConfigurationParser::GetInt'), ('ConfigurationParser', 'ConfigurationParser::GetDouble'), ('ConfigurationParser', 'ConfigurationParser::GetBool'), ('ConfigurationParser', 'ConfigurationParser::ConfigurationParser'), ('ConfigurationParser', 'ConfigurationParser'), ('ConfigurationParser::Load', 
'ConfigurationParser'), ('ConfigurationParser::Load', 'operator>>'), ('ConfigurationParser::Load', 'RowReader'), ('ConfigurationParser::Load', 'Exception'), ('ConfigurationParser::Load', 'to_string'), ('ConfigurationParser::Load', 'ConfigurationParser::Load'), ('ConfigurationParser::GetString', 'ConfigurationParser'), ('ConfigurationParser::GetString', '_RangeToEnumerable'), ('ConfigurationParser::GetString', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetInt', 'ConfigurationParser'), ('ConfigurationParser::GetInt', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetInt', '_RangeToEnumerable'), ('ConfigurationParser::GetInt', 'Exception'), ('ConfigurationParser::GetDouble', 'ConfigurationParser'), ('ConfigurationParser::GetDouble', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetDouble', '_RangeToEnumerable'), ('ConfigurationParser::GetDouble', 'Exception'), ('ConfigurationParser::GetBool', 'ConfigurationParser'), ('ConfigurationParser::GetBool', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetBool', '_RangeToEnumerable'), ('ConfigurationParser::GetBool', 'Equal'), ('ConfigurationParser::GetBool', 'StringComparison'), ('ConfigurationParser::GetBool', 'Exception'), ('ConfigurationParser::ConfigurationParser', 'ConfigurationParser'), ('ConfigurationParser::ConfigurationParser', 'ConfigurationParser::Load'), ('ConfigurationParser::ConfigurationParser', 'OpenAndValidate'), ('ConfigurationParser::ConfigurationParser', 'ConfigurationParser::ConfigurationParser'), ('dynamic_kind_of', 'TSrc'), ('pointer_kind_of', 'TSrc'), ('safe_cast', 'TSrc'), ('safe_pointer_cast', 'TSrc'), ('StreamStatusToString', 'TStream'), ('OpenAndValidate', 'TStream'), ('OpenAndValidate', 'TPath'), ('ValidateStream', 'TStream'), ('_RangeToEnumerable', '_RangeToEnumerable'), ('RangeToEnumerable', '_RangeToEnumerable'), ('to_string', 'to_string'), ('StringComparison', 'StringComparison'), ('is_flags', 'StringComparison'), ('Equal', 'Equal'), ('Equal', 
'StringComparison'), ('operator&', 'TEnum'), ('lhs', 'TEnum'), ('rhs', 'TEnum'), ('operator|', 'TEnum'), ('Confirm', 'Confirm'), ('Confirm', 'c'), ('FriendlyNameOf', 'FriendlyNameOf'), ('BlockExitHandler', 'BlockExitHandler'), ('BlockExitHandler', 'e'), ('ReportException', 'ReportException'), ('ReportException', 'subEx'), ('main', 'OpenAndValidate'), ('main', 'ConfigurationParser'), ('main', 'main'), ('main', 'ConfigurationParser::GetBool'), ('main', 'ConfigurationParser::GetDouble'), ('main', 'ConfigurationParser::GetString'), ('stream', 'TStream'), ('status', 'stream'), ('arg1', 'TPath'), ('fs', 'arg1')]\n" + ] + } + ], + "source": [ + "print(G.edges())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.0" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/persper/analytics/analyzer.py b/persper/analytics/analyzer.py index f7a82f20e14..5a580c476a5 100644 --- a/persper/analytics/analyzer.py +++ b/persper/analytics/analyzer.py @@ -1,6 +1,7 @@ import os import time import pickle +import asyncio from persper.analytics.git_tools import get_contents, _diff_with_first_parent from persper.analytics.iterator import RepoIterator @@ -62,7 +63,7 @@ def __init__(self, repo_path, graph_server): self._ri = RepoIterator(repo_path) self._ccgraph = None - def analyze(self, rev=None, + async def analyze(self, rev=None, from_beginning=False, num_commits=None, continue_iter=False, @@ -93,18 +94,18 @@ def analyze(self, rev=None, for idx, commit in enumerate(reversed(commits), 1): phase = 'main' print_commit_info(phase, idx, commit, start_time, 
verbose) - self.analyze_master_commit(commit) + await self.analyze_master_commit(commit) self.autosave(phase, idx, checkpoint_interval) for idx, commit in enumerate(branch_commits, 1): phase = 'branch' print_commit_info(phase, idx, commit, start_time, verbose) - self.analyze_branch_commit(commit) + await self.analyze_branch_commit(commit) self.autosave(phase, idx, checkpoint_interval) self.autosave('finished', 0, 1) - def _analyze_commit(self, commit, server_func): + async def _analyze_commit(self, commit, server_func): self._graph_server.register_commit(commit.hexsha, commit.author.name, commit.author.email, @@ -135,14 +136,21 @@ def _analyze_commit(self, commit, server_func): if old_src or new_src: # todo (hezheng) store the status somewhere for reporting later - status = server_func(old_fname, old_src, new_fname, new_src, diff.diff) + result = server_func(old_fname, old_src, new_fname, new_src, diff.diff) + if asyncio.iscoroutine(result): + result = await result + status = result - def analyze_master_commit(self, commit): - self._analyze_commit(commit, self._graph_server.update_graph) + result = self._graph_server.end_commit(commit.hexsha) + if asyncio.iscoroutine(result): + result = await result + + async def analyze_master_commit(self, commit): + await self._analyze_commit(commit, self._graph_server.update_graph) # todo (hezheng) implement correct analysis for branches - def analyze_branch_commit(self, commit): - self._analyze_commit(commit, self._graph_server.update_graph) + async def analyze_branch_commit(self, commit): + await self._analyze_commit(commit, self._graph_server.update_graph) def reset_state(self): self._ccgraph = None @@ -159,4 +167,4 @@ def autosave(self, phase, idx, checkpoint_interval): if idx % checkpoint_interval == 0: repo_name = os.path.basename(self._ri.repo_path.rstrip('/')) fname = repo_name + '-' + phase + '-' + str(idx) + '.pickle' - self.save(fname) + #self.save(fname) diff --git a/persper/analytics/graph_server.py 
b/persper/analytics/graph_server.py index 5cf79c1d55a..8c4bb5985ac 100644 --- a/persper/analytics/graph_server.py +++ b/persper/analytics/graph_server.py @@ -45,6 +45,9 @@ def update_graph(self, old_filename: str, old_src: str, """ pass + def end_commit(self, hexsha): + pass + @abstractmethod def get_graph(self): """ diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 6e86750cc1d..10897b04cc9 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -40,33 +40,43 @@ def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[st def register_commit(self, hexsha, author_name, author_email, commit_message): self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) - def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): - with asyncio.new_event_loop() as loop: - loop.run_until_complete(self._onFileChanged(old_filename, old_src, new_filename, new_src, patch)) - - async def _onFileChanged(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): + async def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): oldPath = self._workspaceRoot.joinpath(old_filename).resolve() if old_filename else None newPath = self._workspaceRoot.joinpath(new_filename).resolve() if new_filename else None - if old_filename != new_filename: + assert oldPath or newPath + if oldPath and oldPath != newPath: await self._callGraphBuilder.deleteFile(oldPath) - await self._callGraphBuilder.modifyFile(new_filename, new_src) - self._invalidatedFiles.add(oldPath) - self._invalidatedFiles.add(newPath) + self._invalidatedFiles.add(oldPath) + if newPath: + await self._callGraphBuilder.modifyFile(newPath, new_src) + self._invalidatedFiles.add(newPath) + + async def end_commit(self, hexsha): + await self.updateGraph() + # 
self._callGraph.dumpTo("Graph-" + hexsha + ".txt") + _logger.info("End commit: %s", hexsha) - def get_graph(self): - with asyncio.new_event_loop() as loop: - loop.run_until_complete(self.updateGraph()) + async def get_graph(self): return self._ccgraph def reset_graph(self): self._callGraph.clear() def filter_file(self, filename): - return self._callGraphBuilder.filterFile(filename) + filePath = self._workspaceRoot.joinpath(filename).resolve() + # print("Filter: ", filePath, self._callGraphBuilder.filterFile(str(filePath))) + return self._callGraphBuilder.filterFile(str(filePath)) def config(self, param: dict): pass + async def __aenter__(self): + await self.startLspClient() + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.stopLspClient() + async def startLspClient(self): """ When overridden in derived class, starts the LSP server process, @@ -107,5 +117,5 @@ async def updateGraph(self): return affectedFiles = self._callGraphManager.removeByFiles(self._invalidatedFiles) _logger.info("Invalidated %d files, affected %d files.", len(self._invalidatedFiles), len(affectedFiles)) - self._callGraphManager.buildGraph(fileNames=affectedFiles) + await self._callGraphManager.buildGraph(fileNames=affectedFiles) self._invalidatedFiles.clear() diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index f8aa49aff51..7cf1a98795d 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -11,7 +11,7 @@ from antlr4.error.ErrorListener import ErrorListener from jsonrpc.exceptions import JsonRpcException -import wildcards +from persper.analytics.lsp_graph_server import wildcards from persper.analytics.lsp_graph_server.languageclient.lspclient import LspClient from persper.analytics.lsp_graph_server.languageclient.lspcontract import \ DocumentSymbol, Location, Position, SymbolInformation, SymbolKind, 
\ @@ -51,7 +51,7 @@ def __init__(self, tokens: Iterable[Token], if isinstance(s, DocumentSymbol): # We assume selectionRange is exactly the range of symbol name symbolKinds[s.selectionRange.start.toTuple()] = s.kind - self._scopes.append(CallGraphScope(s.name, s.kind, fileName, s.range.start, s.range.end)) + self._scopes.append(CallGraphScope(s.detail or s.name, s.kind, fileName, s.range.start, s.range.end)) elif isinstance(s, SymbolInformation): symbolKinds[(s.location.range.start.line, s.name)] = (s.location.range.start.character, s.kind) self._scopes.append(CallGraphScope(s.containerName, s.kind, fileName, @@ -187,7 +187,10 @@ def removeDocumentCache(self, path: Union[str, PurePath]): """ if isinstance(path, str): path = Path(path).resolve() - del self._tokenizedDocCache[path] + try: + del self._tokenizedDocCache[path] + except KeyError: + pass async def getTokenizedDocument(self, path: Union[str, PurePath]): class MyLexerErrorListener(ErrorListener): @@ -249,7 +252,7 @@ def inferLanguageId(self, path: PurePath) -> str: """ ext = path.suffix.lower() return _KNOWN_EXTENSION_LANGUAGES[ext] - + @abstractclassmethod def createLexer(self, fileStream: FileStream) -> Lexer: raise NotImplementedError @@ -343,9 +346,9 @@ async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch] async def deleteFile(self, fileName: str): path = Path(fileName).resolve() + self.removeDocumentCache(path) if not path.exists: return False - self.removeDocumentCache(path) doc = TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") self._lspClient.server.textDocumentDidOpen(doc) # Empty the file and notify language server. 
@@ -362,18 +365,19 @@ async def modifyFile(self, fileName: str, newContent: str): if newContent is None: newContent = "" path = Path(fileName).resolve() + self.removeDocumentCache(path) try: + originalFileExists = path.exists() doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) \ - if path.exists() \ + if originalFileExists \ else TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") try: - self.removeDocumentCache(path) self._lspClient.server.textDocumentDidOpen(doc) self._lspClient.server.textDocumentDidChange(doc.uri, 2, [TextDocumentContentChangeEvent(newContent)]) - with open(fileName, "wt", encoding="utf-8") as f: + with open(fileName, "wt", encoding="utf-8", errors="replace") as f: f.write(newContent) self._lspClient.server.textDocumentDidSave(doc.uri) - _logger.info("Modified %s.", path) + _logger.info("%s %s.", "Modified " if originalFileExists else "Created", path) return doc.text finally: await self.closeDocument(doc.uri) diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py index 7c91e651ff6..591b0cd7e3c 100644 --- a/persper/analytics/lsp_graph_server/callgraph/manager.py +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -2,8 +2,9 @@ Contains CallGraphManager. """ import logging -from pathlib import Path +from pathlib import Path, PurePath from typing import Iterable, NamedTuple, Tuple, Union +from os import path from . 
import CallGraph from .builder import CallGraphBuilder @@ -22,7 +23,7 @@ def __init__(self, builder: CallGraphBuilder, callGraph: CallGraph = None): raise TypeError("builderType should be a subtype of CallGraphBuilder.") self._builder = builder self._graph = callGraph or CallGraph() - #self.x = 0 + # self._rebuildCounter = 0 @property def graph(self): @@ -51,10 +52,13 @@ def pushBranch(branch): _logger.debug("%s Branch: %s", ex, branch) if fileNames: - if isinstance(fileNames, str): + if isinstance(fileNames, (str, PurePath)): fileNames = [fileNames] for fn in fileNames: - async for b in self._builder.buildCallGraphInFile(fn): + sfn = str(fn) + if not path.exists(sfn): + continue + async for b in self._builder.buildCallGraphInFile(sfn): pushBranch(b) if globPattern or not fileNames: async for b in self._builder.buildCallGraphInFiles(globPattern): @@ -77,6 +81,6 @@ async def rebuildGraph(self, fileNames: Iterable[str]): whose source or definition node contains the specified files. """ affectedFiles = self.removeByFiles(fileNames) - #self.x += 1 - #self._graph.dumpTo("dmp" + str(self.x) + ".txt") + self._rebuildCounter += 1 + # self._graph.dumpTo("rebuild_" + str(self._rebuildCounter) + ".txt") await self.buildGraph((str(p) for p in affectedFiles)) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 0c48e11616a..2e27e4ffc06 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -85,7 +85,7 @@ class CclsCallGraphBuilder(CallGraphBuilder): def __init__(self, lspClient: CclsLspClient): if not isinstance(lspClient, CclsLspClient): raise TypeError("lspClient should be an instance of CclsLspClient.") - super().__init__(CPP14Lexer, lspClient) + super().__init__(lspClient) def createLexer(self, fileStream: FileStream): return CPP14Lexer(fileStream) @@ -135,18 +135,16 @@ class CclsGraphServer(LspClientGraphServer): defaultLanguageServerCommand = "./bin/ccls 
-log-file=ccls.log" def __init__(self, workspaceRoot: str, cacheRoot: str = None, languageServerCommand: Union[str, List[str]] = None): - super().__init__(workspaceRoot) + super().__init__(workspaceRoot, languageServerCommand=languageServerCommand) self._cacheRoot = Path(cacheRoot).resolve() if cacheRoot else self._workspaceRoot.joinpath(".ccls-cache") async def startLspClient(self): - super().startLspClient() - self._lspClient = CclsLspClient(self._lspServerProc.stdout, self._lspServerProc.stdin) - self._callGraphBuilder = CclsCallGraphBuilder(self._lspClient) - self._callGraphManager = CallGraphManager(self._callGraphBuilder, self._callGraph) + await super().startLspClient() + self._lspClient = CclsLspClient(self._lspServerProc.stdout, self._lspServerProc.stdin, logFile="rpclog.log") self._lspClient.start() _logger.debug(await self._lspClient.server.initialize( rootFolder=self._workspaceRoot, - initializationOptions={"cacheDirectory": self._cacheRoot, + initializationOptions={"cacheDirectory": str(self._cacheRoot), "diagnostics": {"onParse": False, "onType": False}, "discoverSystemIncludes": True, "enableCacheRead": True, @@ -156,6 +154,17 @@ async def startLspClient(self): "extraArgs": ["-nocudalib"], "pathMappings": [], "resourceDir": "" - } - })) + }, + "index": {"threads": 1} # Ccls has concurrency issue, for now. 
+ })) self._lspClient.server.initialized() + self._callGraphBuilder = CclsCallGraphBuilder(self._lspClient) + self._callGraphBuilder.workspaceFilePatterns = [ + str(self._workspaceRoot.joinpath("**/*.[Hh]")), + str(self._workspaceRoot.joinpath("**/*.[Hh]pp")), + str(self._workspaceRoot.joinpath("**/*.[Cc]")), + str(self._workspaceRoot.joinpath("**/*.[Cc]c")), + str(self._workspaceRoot.joinpath("**/*.[Cc]pp")), + str(self._workspaceRoot.joinpath("**/*.[Cc]xx")) + ] + self._callGraphManager = CallGraphManager(self._callGraphBuilder, self._callGraph) diff --git a/persper/analytics/lsp_graph_server/jsonrpcutils.py b/persper/analytics/lsp_graph_server/jsonrpcutils.py index d9735c132ce..bce06b62123 100644 --- a/persper/analytics/lsp_graph_server/jsonrpcutils.py +++ b/persper/analytics/lsp_graph_server/jsonrpcutils.py @@ -1,4 +1,6 @@ +import json from datetime import datetime + from jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter @@ -8,10 +10,10 @@ def __init__(self, fileName): self._file = open(fileName, "wt") def logTX(self, message: dict): - self._file.write("{0} < {1}\n".format(datetime.now(), message)) + self._file.write("{0} < {1}\n".format(datetime.now(), json.dumps(message))) def logRX(self, message: dict): - self._file.write("{0} > {1}\n".format(datetime.now(), message)) + self._file.write("{0} > {1}\n".format(datetime.now(), json.dumps(message))) def __exit__(self, exc_type, exc_value, traceback): self._file.close() From 29cf1dfe67de3a91c96268a9df8409318504acab Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 2 Dec 2018 23:21:29 +0800 Subject: [PATCH 07/84] Make LspClientGraphServer pickle-friendly. 
--- persper/analytics/analyzer.py | 2 +- persper/analytics/lsp_graph_server/__init__.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/persper/analytics/analyzer.py b/persper/analytics/analyzer.py index 5a580c476a5..fed7dde2175 100644 --- a/persper/analytics/analyzer.py +++ b/persper/analytics/analyzer.py @@ -167,4 +167,4 @@ def autosave(self, phase, idx, checkpoint_interval): if idx % checkpoint_interval == 0: repo_name = os.path.basename(self._ri.repo_path.rstrip('/')) fname = repo_name + '-' + phase + '-' + str(idx) + '.pickle' - #self.save(fname) + self.save(fname) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 10897b04cc9..7ce827d264f 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -37,6 +37,18 @@ def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[st self._callGraphBuilder: CallGraphBuilder = None self._callGraphManager: CallGraphManager = None + def __getstate__(self): + state = self.__dict__.copy() + del state["_lspServerProc"] + del state["_lspClient"] + del state["_callGraphBuilder"] + del state["_callGraphManager"] + + def __setstate__(self, state): + self.__dict__.update(state) + if not self._workspaceRoot.exists(): + self._workspaceRoot.touch() + def register_commit(self, hexsha, author_name, author_email, commit_message): self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) From cdb85ed0f8c9e910a740de72d5a537808e29d1da Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 2 Dec 2018 23:23:08 +0800 Subject: [PATCH 08/84] Update gitignore. 
--- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 40674f75af0..2f2b3d8d9b4 100644 --- a/.gitignore +++ b/.gitignore @@ -90,3 +90,6 @@ ENV/ # Rope project settings .ropeproject +.ccls-cache +/bin +/bin-* From cd92e8af078a284e247c58bc3f598d6999994a18 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 8 Dec 2018 01:04:27 +0800 Subject: [PATCH 09/84] Modify files outside the LSP editor so ccls indexer will correctly invalidate the files by checking last modification time. --- notebooks/lsp-ccls.ipynb | 226 +++++++----------- .../analytics/lsp_graph_server/__init__.py | 6 + .../lsp_graph_server/callgraph/builder.py | 13 +- persper/analytics/lsp_graph_server/ccls.py | 39 +-- 4 files changed, 120 insertions(+), 164 deletions(-) diff --git a/notebooks/lsp-ccls.ipynb b/notebooks/lsp-ccls.ipynb index 53b99e41894..bc58c890a46 100644 --- a/notebooks/lsp-ccls.ipynb +++ b/notebooks/lsp-ccls.ipynb @@ -8,9 +8,9 @@ "source": [ "import logging\n", "\n", - "logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s',\n", - " level=logging.INFO)\n", - "logging.getLogger('asyncio').setLevel(logging.CRITICAL)" + "#logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s',\n", + "# level=logging.INFO)\n", + "#logging.getLogger('asyncio').setLevel(logging.CRITICAL)" ] }, { @@ -24,145 +24,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2018-12-02 22:54:11,892 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Exceptions.h.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpp_gx74i7\n", "----- Overview ------\n", "# of commits on master: 4\n", "# of commits on branch: 0\n", - "----- No.1 
e04529b7290fb09339c51df33b7b98123e5b6b77 on main -----\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2018-12-02 22:54:11,958 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp.\n", - "2018-12-02 22:54:12,022 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h.\n", - "2018-12-02 22:54:12,090 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TypeTraits.h.\n", - "2018-12-02 22:54:12,153 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp.\n", - "2018-12-02 22:54:12,223 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h.\n", - "2018-12-02 22:54:12,286 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", - "2018-12-02 22:54:12,362 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.cpp.\n", - "2018-12-02 22:54:12,425 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Created C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.h.\n", - "2018-12-02 22:54:12,429 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 0 branches by 9 files.\n", - "2018-12-02 22:54:12,430 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", - "2018-12-02 22:54:12,431 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h\n", - "2018-12-02 22:54:16,084 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 44 branches.\n", - "2018-12-02 22:54:16,086 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.cpp\n", - "2018-12-02 22:54:16,196 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", - "2018-12-02 22:54:16,197 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h\n", - "2018-12-02 22:54:17,486 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 79 branches.\n", - "2018-12-02 22:54:17,487 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n", - "2018-12-02 22:54:17,623 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", - "2018-12-02 22:54:17,624 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp\n", - "2018-12-02 22:54:18,126 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 102 branches.\n", - "2018-12-02 22:54:18,127 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\stdafx.h\n", - "2018-12-02 22:54:18,360 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", - "2018-12-02 22:54:18,361 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp\n", - "2018-12-02 22:54:18,573 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2018-12-02 22:54:18,574 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TypeTraits.h\n", - "2018-12-02 22:54:18,696 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 2 branches.\n", - "2018-12-02 
22:54:18,698 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Exceptions.h\n", - "2018-12-02 22:54:18,876 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 16 branches.\n", - "2018-12-02 22:54:18,877 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 256 branches.\n", - "2018-12-02 22:54:18,877 INFO [persper.analytics.lsp_graph_server] End commit: e04529b7290fb09339c51df33b7b98123e5b6b77\n", - "2018-12-02 22:54:19,049 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", - "2018-12-02 22:54:19,050 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 6 branches by 1 files.\n", - "2018-12-02 22:54:19,051 INFO [persper.analytics.lsp_graph_server] Invalidated 1 files, affected 1 files.\n", - "2018-12-02 22:54:19,052 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "----- No.2 ff47723c41d94eb1d90fa005986cafdc0b5ba7b3 on main -----\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2018-12-02 22:54:20,010 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 10 branches.\n", - "2018-12-02 22:54:20,011 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 10 branches.\n", - "2018-12-02 22:54:20,012 INFO [persper.analytics.lsp_graph_server] End commit: ff47723c41d94eb1d90fa005986cafdc0b5ba7b3\n", - "2018-12-02 22:54:20,199 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp." 
- ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "----- No.3 4ff40e1456a4de88d802c410e755f34ad31ff303 on main -----\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n", - "2018-12-02 22:54:20,315 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp.\n", - "2018-12-02 22:54:20,436 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h.\n", - "2018-12-02 22:54:20,565 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", - "2018-12-02 22:54:20,567 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 260 branches by 5 files.\n", - "2018-12-02 22:54:20,568 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 5 files.\n", - "2018-12-02 22:54:20,570 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h\n", - "2018-12-02 22:54:23,189 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 44 branches.\n", - "2018-12-02 22:54:23,190 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h\n", - "2018-12-02 22:54:24,340 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 85 branches.\n", - "2018-12-02 22:54:24,342 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n", - "2018-12-02 22:54:24,474 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", - "2018-12-02 22:54:24,475 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp\n", - "2018-12-02 
22:54:24,987 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 161 branches.\n", - "2018-12-02 22:54:24,987 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp\n", - "2018-12-02 22:54:25,207 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2018-12-02 22:54:25,208 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 327 branches.\n", - "2018-12-02 22:54:25,209 INFO [persper.analytics.lsp_graph_server] End commit: 4ff40e1456a4de88d802c410e755f34ad31ff303\n", - "2018-12-02 22:54:25,391 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "----- No.4 62d85a54623c6ddafdef5b6bef0e24f53d49aa65 on main -----\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2018-12-02 22:54:25,508 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp.\n", - "2018-12-02 22:54:25,627 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h.\n", - "2018-12-02 22:54:25,747 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp.\n", - "2018-12-02 22:54:25,750 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 327 branches by 5 files.\n", - "2018-12-02 22:54:25,750 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 5 files.\n", - "2018-12-02 22:54:25,752 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.h\n", - "2018-12-02 22:54:28,442 INFO [persper.analytics.lsp_graph_server.callgraph.builder] 
Yielded 44 branches.\n", - "2018-12-02 22:54:28,444 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.h\n", - "2018-12-02 22:54:29,587 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 85 branches.\n", - "2018-12-02 22:54:29,589 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\TextFileParsers.cpp\n", - "2018-12-02 22:54:30,116 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 161 branches.\n", - "2018-12-02 22:54:30,117 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\main.cpp\n", - "2018-12-02 22:54:30,251 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 3 branches.\n", - "2018-12-02 22:54:30,253 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpvjc5oxj6\\Utility.cpp\n", - "2018-12-02 22:54:30,480 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2018-12-02 22:54:30,481 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 324 branches.\n", - "2018-12-02 22:54:30,481 INFO [persper.analytics.lsp_graph_server] End commit: 62d85a54623c6ddafdef5b6bef0e24f53d49aa65\n", - "2018-12-02 22:54:30,482 INFO [persper.analytics.lsp_graph_server] Shutting down language server...\n", - "2018-12-02 22:54:30,563 INFO [persper.analytics.lsp_graph_server] Language server 21420 exited with code: 0.\n" + "----- No.1 ad1187ea6cfc9dc5f2ea869db16895b3e7ae10f0 on main -----\n", + "----- No.2 f3cce3b9fb9cffe48343abad7e9617dd724835d9 on main -----\n", + "----- No.3 aca68e9d114665fb327b90234129cf4478800e10 on main -----\n", + "----- No.4 f482b140071867d4b2cd30f045725775e5394ccd on main -----\n" ] } ], @@ -190,7 +59,7 @@ "name": "stdout", "output_type": "stream", 
"text": [ - "['RowReader', 'TStream', 'operator>>', 'ConfigurationParser', 'ConfigurationParser::Load', 'ConfigurationParser::GetString', 'ConfigurationParser::GetInt', 'ConfigurationParser::GetDouble', 'ConfigurationParser::GetBool', 'ConfigurationParser::ConfigurationParser', 'dynamic_kind_of', 'TSrc', 'pointer_kind_of', 'safe_cast', 'safe_pointer_cast', 'StreamStatusToString', 'OpenAndValidate', 'TPath', 'ValidateStream', '_RangeToEnumerable', 'RangeToEnumerable', 'to_string', 'StringComparison', 'is_flags', 'Equal', 'operator&', 'TEnum', 'lhs', 'rhs', 'operator|', 'Confirm', 'FriendlyNameOf', 'BlockExitHandler', 'e', 'ReportException', 'main', 'Exception', 'c', 'subEx', 'stream', 'status', 'arg1', 'fs']\n" + "['Equal', 'StringComparison', 'Confirm', 'c', 'ReportException', 'subEx', 'main', 'OpenAndValidate', 'ConfigurationParser', 'dynamic_kind_of', 'TSrc', 'pointer_kind_of', 'safe_cast', 'safe_pointer_cast', 'StreamStatusToString', 'TStream', 'TPath', 'ValidateStream', '_RangeToEnumerable', 'RangeToEnumerable', 'to_string', 'is_flags', 'operator&', 'TEnum', 'lhs', 'rhs', 'operator|', 'FriendlyNameOf', 'BlockExitHandler', 'e', 'operator>>', 'ConfigurationParser::Load', 'ConfigurationParser::ConfigurationParser', 'Exception', 'ConfigurationParser::GetString', 'ConfigurationParser::GetInt', 'ConfigurationParser::GetDouble', 'ConfigurationParser::GetBool', 'InvalidCastException', 'ArgumentException', 'OperationFailureException', 'is_flags_v']\n" ] } ], @@ -201,18 +70,89 @@ { "cell_type": "code", "execution_count": 4, - "metadata": {}, + "metadata": { + "scrolled": false + }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[('RowReader', 'RowReader'), ('RowReader', 'TStream'), ('operator>>', 'TStream'), ('operator>>', 'RowReader'), ('operator>>', 'operator>>'), ('operator>>', 'Equal'), ('operator>>', 'StringComparison'), ('operator>>', 'c'), ('ConfigurationParser', 'ConfigurationParser::Load'), ('ConfigurationParser', 
'ConfigurationParser::GetString'), ('ConfigurationParser', 'ConfigurationParser::GetInt'), ('ConfigurationParser', 'ConfigurationParser::GetDouble'), ('ConfigurationParser', 'ConfigurationParser::GetBool'), ('ConfigurationParser', 'ConfigurationParser::ConfigurationParser'), ('ConfigurationParser', 'ConfigurationParser'), ('ConfigurationParser::Load', 'ConfigurationParser'), ('ConfigurationParser::Load', 'operator>>'), ('ConfigurationParser::Load', 'RowReader'), ('ConfigurationParser::Load', 'Exception'), ('ConfigurationParser::Load', 'to_string'), ('ConfigurationParser::Load', 'ConfigurationParser::Load'), ('ConfigurationParser::GetString', 'ConfigurationParser'), ('ConfigurationParser::GetString', '_RangeToEnumerable'), ('ConfigurationParser::GetString', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetInt', 'ConfigurationParser'), ('ConfigurationParser::GetInt', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetInt', '_RangeToEnumerable'), ('ConfigurationParser::GetInt', 'Exception'), ('ConfigurationParser::GetDouble', 'ConfigurationParser'), ('ConfigurationParser::GetDouble', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetDouble', '_RangeToEnumerable'), ('ConfigurationParser::GetDouble', 'Exception'), ('ConfigurationParser::GetBool', 'ConfigurationParser'), ('ConfigurationParser::GetBool', 'ConfigurationParser::GetString'), ('ConfigurationParser::GetBool', '_RangeToEnumerable'), ('ConfigurationParser::GetBool', 'Equal'), ('ConfigurationParser::GetBool', 'StringComparison'), ('ConfigurationParser::GetBool', 'Exception'), ('ConfigurationParser::ConfigurationParser', 'ConfigurationParser'), ('ConfigurationParser::ConfigurationParser', 'ConfigurationParser::Load'), ('ConfigurationParser::ConfigurationParser', 'OpenAndValidate'), ('ConfigurationParser::ConfigurationParser', 'ConfigurationParser::ConfigurationParser'), ('dynamic_kind_of', 'TSrc'), ('pointer_kind_of', 'TSrc'), ('safe_cast', 'TSrc'), ('safe_pointer_cast', 'TSrc'), 
('StreamStatusToString', 'TStream'), ('OpenAndValidate', 'TStream'), ('OpenAndValidate', 'TPath'), ('ValidateStream', 'TStream'), ('_RangeToEnumerable', '_RangeToEnumerable'), ('RangeToEnumerable', '_RangeToEnumerable'), ('to_string', 'to_string'), ('StringComparison', 'StringComparison'), ('is_flags', 'StringComparison'), ('Equal', 'Equal'), ('Equal', 'StringComparison'), ('operator&', 'TEnum'), ('lhs', 'TEnum'), ('rhs', 'TEnum'), ('operator|', 'TEnum'), ('Confirm', 'Confirm'), ('Confirm', 'c'), ('FriendlyNameOf', 'FriendlyNameOf'), ('BlockExitHandler', 'BlockExitHandler'), ('BlockExitHandler', 'e'), ('ReportException', 'ReportException'), ('ReportException', 'subEx'), ('main', 'OpenAndValidate'), ('main', 'ConfigurationParser'), ('main', 'main'), ('main', 'ConfigurationParser::GetBool'), ('main', 'ConfigurationParser::GetDouble'), ('main', 'ConfigurationParser::GetString'), ('stream', 'TStream'), ('status', 'stream'), ('arg1', 'TPath'), ('fs', 'arg1')]\n" + "Edges: 67\n", + "Equal | Equal\n", + "Equal | StringComparison\n", + "StringComparison | StringComparison\n", + "Confirm | Confirm\n", + "Confirm | c\n", + "ReportException | ReportException\n", + "ReportException | subEx\n", + "main | OpenAndValidate\n", + "main | ConfigurationParser\n", + "main | main\n", + "main | ConfigurationParser::GetBool\n", + "main | ConfigurationParser::GetDouble\n", + "main | ConfigurationParser::GetString\n", + "OpenAndValidate | TStream\n", + "OpenAndValidate | TPath\n", + "dynamic_kind_of | TSrc\n", + "pointer_kind_of | TSrc\n", + "safe_cast | TSrc\n", + "safe_pointer_cast | TSrc\n", + "StreamStatusToString | TStream\n", + "ValidateStream | TStream\n", + "_RangeToEnumerable | _RangeToEnumerable\n", + "RangeToEnumerable | _RangeToEnumerable\n", + "to_string | to_string\n", + "is_flags | StringComparison\n", + "operator& | TEnum\n", + "lhs | TEnum\n", + "rhs | TEnum\n", + "operator| | TEnum\n", + "FriendlyNameOf | FriendlyNameOf\n", + "BlockExitHandler | BlockExitHandler\n", + 
"BlockExitHandler | e\n", + "operator>> | operator>>\n", + "operator>> | c\n", + "operator>> | Equal\n", + "operator>> | StringComparison\n", + "ConfigurationParser::Load | ConfigurationParser::ConfigurationParser\n", + "ConfigurationParser::Load | ConfigurationParser::Load\n", + "ConfigurationParser::Load | operator>>\n", + "ConfigurationParser::Load | Exception\n", + "ConfigurationParser::ConfigurationParser | ConfigurationParser::ConfigurationParser\n", + "ConfigurationParser::ConfigurationParser | ConfigurationParser::Load\n", + "ConfigurationParser::ConfigurationParser | OpenAndValidate\n", + "Exception | Exception\n", + "ConfigurationParser::GetString | ConfigurationParser::ConfigurationParser\n", + "ConfigurationParser::GetString | ConfigurationParser::GetString\n", + "ConfigurationParser::GetInt | ConfigurationParser::ConfigurationParser\n", + "ConfigurationParser::GetInt | ConfigurationParser::GetString\n", + "ConfigurationParser::GetInt | Exception\n", + "ConfigurationParser::GetInt | ConfigurationParser::GetInt\n", + "ConfigurationParser::GetDouble | ConfigurationParser::ConfigurationParser\n", + "ConfigurationParser::GetDouble | ConfigurationParser::GetString\n", + "ConfigurationParser::GetDouble | Exception\n", + "ConfigurationParser::GetDouble | ConfigurationParser::GetDouble\n", + "ConfigurationParser::GetBool | ConfigurationParser::ConfigurationParser\n", + "ConfigurationParser::GetBool | ConfigurationParser::GetString\n", + "ConfigurationParser::GetBool | Equal\n", + "ConfigurationParser::GetBool | StringComparison\n", + "ConfigurationParser::GetBool | Exception\n", + "ConfigurationParser::GetBool | ConfigurationParser::GetBool\n", + "InvalidCastException | Exception\n", + "InvalidCastException | InvalidCastException\n", + "ArgumentException | Exception\n", + "ArgumentException | ArgumentException\n", + "OperationFailureException | Exception\n", + "OperationFailureException | OperationFailureException\n", + "is_flags_v | is_flags\n" ] } ], 
"source": [ - "print(G.edges())" + "print(\"Edges:\", len(G.edges()))\n", + "for e in G.edges():\n", + " print(e[0], \" | \", e[1])" ] }, { diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 7ce827d264f..2a7d3d2df1f 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -4,6 +4,7 @@ from abc import abstractclassmethod, abstractproperty from pathlib import Path from typing import List, Union +from datetime import datetime, timedelta from persper.analytics.call_commit_graph import CallCommitGraph from persper.analytics.graph_server import GraphServer @@ -36,6 +37,7 @@ def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[st self._lspClient: LspClient = None self._callGraphBuilder: CallGraphBuilder = None self._callGraphManager: CallGraphManager = None + self._lastFileWrittenTime: datetime = None def __getstate__(self): state = self.__dict__.copy() @@ -62,11 +64,15 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, if newPath: await self._callGraphBuilder.modifyFile(newPath, new_src) self._invalidatedFiles.add(newPath) + self._lastFileWrittenTime = datetime.now() async def end_commit(self, hexsha): await self.updateGraph() # self._callGraph.dumpTo("Graph-" + hexsha + ".txt") _logger.info("End commit: %s", hexsha) + # ensure the files in the next commit has a different timestamp as this commit. 
+ if datetime.now() - self._lastFileWrittenTime < timedelta(seconds=1): + await asyncio.sleep(1) async def get_graph(self): return self._ccgraph diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 7cf1a98795d..a45375e7fa5 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -372,14 +372,17 @@ async def modifyFile(self, fileName: str, newContent: str): if originalFileExists \ else TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") try: - self._lspClient.server.textDocumentDidOpen(doc) - self._lspClient.server.textDocumentDidChange(doc.uri, 2, [TextDocumentContentChangeEvent(newContent)]) - with open(fileName, "wt", encoding="utf-8", errors="replace") as f: - f.write(newContent) - self._lspClient.server.textDocumentDidSave(doc.uri) + await self.modifyFileCore(path, doc, newContent) _logger.info("%s %s.", "Modified " if originalFileExists else "Created", path) return doc.text finally: await self.closeDocument(doc.uri) except Exception as ex: raise Exception("Cannot modify {0}.".format(path)) from ex + + async def modifyFileCore(self, filePath: PurePath, originalDocument: TextDocument, newContent: str): + self._lspClient.server.textDocumentDidOpen(originalDocument) + self._lspClient.server.textDocumentDidChange(originalDocument.uri, 2, [TextDocumentContentChangeEvent(newContent)]) + with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: + f.write(newContent) + self._lspClient.server.textDocumentDidSave(originalDocument.uri) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 2e27e4ffc06..5e7141e8072 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -16,7 +16,7 @@ from .callgraph.manager import CallGraphManager from .fileparsers.CPP14Lexer import 
CPP14Lexer from .languageclient.lspclient import LspClient -from .languageclient.lspcontract import TextDocument +from .languageclient.lspcontract import TextDocument, TextDocumentContentChangeEvent from .languageclient.lspserver import LspServerStub _logger = logging.getLogger(__name__) @@ -99,24 +99,27 @@ def inferLanguageId(self, path: PurePath): def modifyFile(self, fileName: str, newContent: str): return super().modifyFile(fileName, newContent) + async def _waitForJobs(self): + lastJobs = None + while True: + curJobs = await self._lspClient.server.getJobs() + if curJobs != lastJobs: + _logger.debug("Server jobs: %d.", curJobs) + lastJobs = curJobs + if curJobs == 0: + break + if curJobs < 5: + await sleep(0.05) + elif curJobs < 50: + await sleep(0.1) + else: + await sleep(1) + async def openDocument(self, textDoc: TextDocument): self._lspClient.server.textDocumentDidOpen(textDoc) - lastJobs = None while True: try: - while True: - curJobs = await self._lspClient.server.getJobs() - if curJobs != lastJobs: - _logger.debug("Server jobs: %d.", curJobs) - lastJobs = curJobs - if curJobs == 0: - break - if curJobs < 5: - await sleep(0.05) - elif curJobs < 50: - await sleep(0.1) - else: - await sleep(1) + await self._waitForJobs() # dummy request await self._lspClient.server.textDocumentCodeLens(textDoc.uri) return True @@ -129,6 +132,10 @@ async def openDocument(self, textDoc: TextDocument): return False raise + async def modifyFileCore(self, filePath: PurePath, originalDocument: TextDocument, newContent: str): + with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: + f.write(newContent) + class CclsGraphServer(LspClientGraphServer): @@ -155,7 +162,7 @@ async def startLspClient(self): "pathMappings": [], "resourceDir": "" }, - "index": {"threads": 1} # Ccls has concurrency issue, for now. 
+ "index": {"threads": 0} })) self._lspClient.server.initialized() self._callGraphBuilder = CclsCallGraphBuilder(self._lspClient) From 302739e6860578317aaad6b215072de9d895b70f Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 9 Dec 2018 22:02:19 +0800 Subject: [PATCH 10/84] Filter out non-scopable symbol kinds. Add documentations. --- .../analytics/lsp_graph_server/__init__.py | 48 ++++++++++++++--- .../lsp_graph_server/callgraph/adapters.py | 3 ++ .../lsp_graph_server/callgraph/builder.py | 54 ++++++++++++++----- persper/analytics/lsp_graph_server/ccls.py | 21 +++++--- .../languageclient/lspcontract.py | 9 ++++ 5 files changed, 108 insertions(+), 27 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 2a7d3d2df1f..8f9316c113a 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -19,25 +19,49 @@ class LspClientGraphServer(GraphServer): - + """ + The common base class for LSP-client backed-up call graph server. + + The derived class of this class should be used with `async with` statement: + ``` + async with LspClientGraphServer(..) as graphServer: + ... + ``` + """ defaultLanguageServerCommand: Union[str, List[str]] = None + defaultLoggedLanguageServerCommand: Union[str, List[str]] = None + + def __init__(self, workspaceRoot: str, + languageServerCommand: Union[str, List[str]] = None, + dumpLogs: bool = False, + dumpGraphs: bool = False): + """ + workspaceRoot: root of the temporary workspace path. LSP workspace and intermediate repository files + will be placed in this folder. - def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[str]] = None): + languageServerCommand: the command line (in string, or a sequence of parameters) for starting the + language server process. If use `null` or default value, + the value of current class's `defaultLanguageServerCommand` static field will be used. 
+ """ self._ccgraph = CallCommitGraph() self._callGraph = CallCommitGraphSynchronizer(self._ccgraph) self._workspaceRoot: Path = Path(workspaceRoot).resolve() self._invalidatedFiles = set() if not self._workspaceRoot.exists(): self._workspaceRoot.touch() - self._languageServerCommand = \ - languageServerCommand \ - if languageServerCommand != None \ - else type(self).defaultLanguageServerCommand + if languageServerCommand: + self._languageServerCommand = languageServerCommand + elif dumpLogs and type(self).defaultLoggedLanguageServerCommand: + self._languageServerCommand = type(self).defaultLoggedLanguageServerCommand + else: + self._languageServerCommand = type(self).defaultLanguageServerCommand self._lspServerProc: subprocess.Popen = None self._lspClient: LspClient = None self._callGraphBuilder: CallGraphBuilder = None self._callGraphManager: CallGraphManager = None self._lastFileWrittenTime: datetime = None + self._dumpLogs = dumpLogs + self._dumpGraphs = dumpGraphs def __getstate__(self): state = self.__dict__.copy() @@ -68,7 +92,8 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, async def end_commit(self, hexsha): await self.updateGraph() - # self._callGraph.dumpTo("Graph-" + hexsha + ".txt") + if self._dumpGraphs: + self._callGraph.dumpTo("Graph-" + hexsha + ".txt") _logger.info("End commit: %s", hexsha) # ensure the files in the next commit has a different timestamp as this commit. if datetime.now() - self._lastFileWrittenTime < timedelta(seconds=1): @@ -110,6 +135,11 @@ async def startLspClient(self): creationflags=subprocess.CREATE_NEW_CONSOLE) async def stopLspClient(self): + """ + Performs LSP client stop sequence. + This method is usually invoked in `__aexit__` so you do not have to call it manually + if you are using this class instance with `async with` statement. 
+ """ if not self._lspServerProc: return _logger.info("Shutting down language server...") @@ -126,6 +156,10 @@ async def stopLspClient(self): self._callGraphManager = None def invalidateFile(self, path: Union[str, Path]): + """ + Mark the call graph for the specified file as invalidated, so it should be re-generated in + the next `updateGraph` call. + """ if isinstance(path, str): path = Path(path).resolve() self._invalidatedFiles.add(path) diff --git a/persper/analytics/lsp_graph_server/callgraph/adapters.py b/persper/analytics/lsp_graph_server/callgraph/adapters.py index 251a2aa0412..f45ec69108a 100644 --- a/persper/analytics/lsp_graph_server/callgraph/adapters.py +++ b/persper/analytics/lsp_graph_server/callgraph/adapters.py @@ -10,6 +10,9 @@ def __init__(self, callCommitGraph: CallCommitGraph): def add(self, branch: CallGraphBranch): super().add(branch) + if branch.sourceScope == branch.definitionScope: + # e.g. variable referernces. + return # Use scope full name as identifier. self._callCommitGraph.add_node(branch.sourceScope.name) self._callCommitGraph.add_node(branch.definitionScope.name) diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index a45375e7fa5..161d3c81e53 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -29,6 +29,26 @@ } +_SCOPE_SYMBOL_KINDS = { + # SymbolKind.Unknown, + SymbolKind.Class, + SymbolKind.Constructor, + SymbolKind.Enum, + SymbolKind.File, + SymbolKind.Function, + SymbolKind.Interface, + SymbolKind.Macro, + SymbolKind.Method, + SymbolKind.Module, + SymbolKind.Namespace, + SymbolKind.Operator, + SymbolKind.Package, + SymbolKind.Property, + SymbolKind.StaticMethod, + SymbolKind.Struct +} + + class TokenizedDocument: """ Represents a fully tokenized document that supports finding a symbol or scope from @@ -47,17 +67,26 @@ def __init__(self, tokens: Iterable[Token], # 
SymbolInformation # { (symbolLine, symbolName): (containerColumn, symbolKind) } symbolKinds = {} - for s in documentSymbols: - if isinstance(s, DocumentSymbol): - # We assume selectionRange is exactly the range of symbol name - symbolKinds[s.selectionRange.start.toTuple()] = s.kind - self._scopes.append(CallGraphScope(s.detail or s.name, s.kind, fileName, s.range.start, s.range.end)) - elif isinstance(s, SymbolInformation): - symbolKinds[(s.location.range.start.line, s.name)] = (s.location.range.start.character, s.kind) - self._scopes.append(CallGraphScope(s.containerName, s.kind, fileName, - s.location.range.start, s.location.range.end)) - else: - _logger.error("Invalid DocumentSymbol in %s: %s", fileName, s) + + def PopulateSymbols(symbols): + for s in symbols: + if s.kind not in _SCOPE_SYMBOL_KINDS: + continue + if isinstance(s, DocumentSymbol): + # We assume selectionRange is exactly the range of symbol name + symbolKinds[s.selectionRange.start.toTuple()] = s.kind + self._scopes.append(CallGraphScope(s.detail or s.name, s.kind, + fileName, s.range.start, s.range.end)) + elif isinstance(s, SymbolInformation): + symbolKinds[(s.location.range.start.line, s.name)] = (s.location.range.start.character, s.kind) + self._scopes.append(CallGraphScope(s.containerName, s.kind, fileName, + s.location.range.start, s.location.range.end)) + if s.children: + PopulateSymbols(s.children) + else: + _logger.error("Invalid DocumentSymbol in %s: %s", fileName, s) + + PopulateSymbols(documentSymbols) # put the scopes in document order of start positions, then by the document order of their end positions self._scopes.sort(key=lambda sc: (sc.startPos, sc.endPos)) NOT_EXISTS = object() @@ -382,7 +411,8 @@ async def modifyFile(self, fileName: str, newContent: str): async def modifyFileCore(self, filePath: PurePath, originalDocument: TextDocument, newContent: str): self._lspClient.server.textDocumentDidOpen(originalDocument) - 
self._lspClient.server.textDocumentDidChange(originalDocument.uri, 2, [TextDocumentContentChangeEvent(newContent)]) + self._lspClient.server.textDocumentDidChange( + originalDocument.uri, 2, [TextDocumentContentChangeEvent(newContent)]) with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: f.write(newContent) self._lspClient.server.textDocumentDidSave(originalDocument.uri) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 5e7141e8072..8a7425ee45b 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -139,15 +139,20 @@ async def modifyFileCore(self, filePath: PurePath, originalDocument: TextDocumen class CclsGraphServer(LspClientGraphServer): - defaultLanguageServerCommand = "./bin/ccls -log-file=ccls.log" - - def __init__(self, workspaceRoot: str, cacheRoot: str = None, languageServerCommand: Union[str, List[str]] = None): - super().__init__(workspaceRoot, languageServerCommand=languageServerCommand) + defaultLanguageServerCommand = "./bin/ccls" + defaultLoggedLanguageServerCommand = "./bin/ccls -log-file=ccls.log" + + def __init__(self, workspaceRoot: str, cacheRoot: str = None, + languageServerCommand: Union[str, List[str]] = None, + dumpLogs: bool = False, + dumpGraphs: bool = False): + super().__init__(workspaceRoot, languageServerCommand=languageServerCommand, dumpLogs=dumpLogs, dumpGraphs=dumpGraphs) self._cacheRoot = Path(cacheRoot).resolve() if cacheRoot else self._workspaceRoot.joinpath(".ccls-cache") async def startLspClient(self): await super().startLspClient() - self._lspClient = CclsLspClient(self._lspServerProc.stdout, self._lspServerProc.stdin, logFile="rpclog.log") + self._lspClient = CclsLspClient(self._lspServerProc.stdout, self._lspServerProc.stdin, + logFile="rpclog.log" if self._dumpLogs else None) self._lspClient.start() _logger.debug(await self._lspClient.server.initialize( rootFolder=self._workspaceRoot, @@ -161,9 
+166,9 @@ async def startLspClient(self): "extraArgs": ["-nocudalib"], "pathMappings": [], "resourceDir": "" - }, - "index": {"threads": 0} - })) + }, + "index": {"threads": 0} + })) self._lspClient.server.initialized() self._callGraphBuilder = CclsCallGraphBuilder(self._lspClient) self._callGraphBuilder.workspaceFilePatterns = [ diff --git a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py index 3f3d9914c9f..3bd057495a4 100644 --- a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py +++ b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py @@ -221,7 +221,16 @@ def __init__(self, name: str, detail: str, kind: SymbolKind, deprecated: bool, r self.kind = kind self.deprecated = deprecated self.range = range + """ + The range enclosing this symbol not including leading/trailing whitespace but everything else + like comments. This information is typically used to determine if the clients cursor is + inside the symbol to reveal in the symbol in the UI. + """ self.selectionRange = selectionRange + """ + The range that should be selected and revealed when this symbol is being picked, e.g the name of a function. + Must be contained by the `range`. + """ self.children = list(children) def getSymbolRange(self): From cf17d6cd78348c6e3cd97c2dcc288251896a481c Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 9 Dec 2018 22:15:06 +0800 Subject: [PATCH 11/84] Add README for lsp_graph_server. 
--- persper/analytics/lsp_graph_server/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 persper/analytics/lsp_graph_server/README.md diff --git a/persper/analytics/lsp_graph_server/README.md b/persper/analytics/lsp_graph_server/README.md new file mode 100644 index 00000000000..ffb9b38992a --- /dev/null +++ b/persper/analytics/lsp_graph_server/README.md @@ -0,0 +1,8 @@ +# lsp_graph_server + +To try out the graph server backed by LSP, especially the [ccls](https://github.com/MaskRay/ccls)-based one, you need +* Compile [ccls-prime](https://github.com/Persper/ccls-prime), the customized ccls fork for graph server. +* Place the compiled binary under `bin` folder of the repository root. +* In the repository root, run `pipenv run ./tools/repo_creater/create_repo.py test/cpp_test_repo/` to create a cpp test repo. +* `jupyter notebook`, then open `notebooks/lsp-ccls.ipynb` +* Execute all the cells From b7fb74904feddfa0679e1dd9f3e3eee0c3fe603d Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 9 Dec 2018 22:50:29 +0800 Subject: [PATCH 12/84] Make LspClientGraphServer really picklable. Remove dependency on jsonpickle. 
--- .../analytics/lsp_graph_server/__init__.py | 11 +++---- .../lsp_graph_server/callgraph/__init__.py | 29 ------------------- .../lsp_graph_server/callgraph/manager.py | 2 +- 3 files changed, 7 insertions(+), 35 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 8f9316c113a..81c25ba363a 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -65,10 +65,11 @@ def __init__(self, workspaceRoot: str, def __getstate__(self): state = self.__dict__.copy() - del state["_lspServerProc"] - del state["_lspClient"] - del state["_callGraphBuilder"] - del state["_callGraphManager"] + state.pop("_lspServerProc", None) + state.pop("_lspClient", None) + state.pop("_callGraphBuilder", None) + state.pop("_callGraphManager", None) + return state def __setstate__(self, state): self.__dict__.update(state) @@ -99,7 +100,7 @@ async def end_commit(self, hexsha): if datetime.now() - self._lastFileWrittenTime < timedelta(seconds=1): await asyncio.sleep(1) - async def get_graph(self): + def get_graph(self): return self._ccgraph def reset_graph(self): diff --git a/persper/analytics/lsp_graph_server/callgraph/__init__.py b/persper/analytics/lsp_graph_server/callgraph/__init__.py index 80341353ced..8740916f7f6 100644 --- a/persper/analytics/lsp_graph_server/callgraph/__init__.py +++ b/persper/analytics/lsp_graph_server/callgraph/__init__.py @@ -6,8 +6,6 @@ from pathlib import Path, PurePath from typing import Dict, Iterable, List, NamedTuple, Tuple, Type, Union -import jsonpickle - from persper.analytics.lsp_graph_server.languageclient.lspcontract import \ DocumentSymbol, Location, Position, SymbolInformation, SymbolKind, \ TextDocument, TextDocumentContentChangeEvent @@ -96,30 +94,3 @@ def dump(self, file: IOBase): def dumpTo(self, fileName: str): with open(fileName, "wt") as f: self.dump(f) - - def serialize(self, file: IOBase): - for item in self._items: - 
file.write(jsonpickle.dumps(item, file)) - file.write("\n") - _logger.info("Written %d call graph branches.", len(self._items)) - - def serializeTo(self, fileName): - with open(fileName, "wt") as f: - self.serialize(f) - - def deserialize(self, file: IOBase): - items = [] - for line in file: - line: str = line.strip() - if line: - item = jsonpickle.loads(line) - if not isinstance(item, CallGraphBranch): - raise ValueError("Parsed object [{0}] is not CallGraphBranch.".format(type(item))) - items.append(item) - self._items = items - _logger.info("Loaded %d call graph branches.", len(items)) - assert isinstance(self._items, list) - - def deserializeFrom(self, fileName): - with open(fileName, "rt") as f: - self.deserialize(fileName) diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py index 591b0cd7e3c..345c7ee8435 100644 --- a/persper/analytics/lsp_graph_server/callgraph/manager.py +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -81,6 +81,6 @@ async def rebuildGraph(self, fileNames: Iterable[str]): whose source or definition node contains the specified files. """ affectedFiles = self.removeByFiles(fileNames) - self._rebuildCounter += 1 + # self._rebuildCounter += 1 # self._graph.dumpTo("rebuild_" + str(self._rebuildCounter) + ".txt") await self.buildGraph((str(p) for p in affectedFiles)) From 2b9a183788c4b92c28b28d3eeb3523d761e6d8a7 Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 10 Dec 2018 21:42:48 +0800 Subject: [PATCH 13/84] Stabilize.
--- .../analytics/lsp_graph_server/__init__.py | 3 +- .../lsp_graph_server/callgraph/__init__.py | 2 +- .../lsp_graph_server/callgraph/builder.py | 61 +++++++++++++------ .../lsp_graph_server/callgraph/manager.py | 22 ++++--- persper/analytics/lsp_graph_server/ccls.py | 13 ++-- 5 files changed, 66 insertions(+), 35 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 81c25ba363a..86b9719f2a5 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -2,9 +2,10 @@ import logging import subprocess from abc import abstractclassmethod, abstractproperty +from datetime import datetime, timedelta +from os import path from pathlib import Path from typing import List, Union -from datetime import datetime, timedelta from persper.analytics.call_commit_graph import CallCommitGraph from persper.analytics.graph_server import GraphServer diff --git a/persper/analytics/lsp_graph_server/callgraph/__init__.py b/persper/analytics/lsp_graph_server/callgraph/__init__.py index 8740916f7f6..35dec6664d6 100644 --- a/persper/analytics/lsp_graph_server/callgraph/__init__.py +++ b/persper/analytics/lsp_graph_server/callgraph/__init__.py @@ -63,7 +63,7 @@ def __eq__(self, other): class CallGraph(): def __init__(self): - self._items = [] + self._items = set() @property def items(self): diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 161d3c81e53..dc527e22609 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -1,3 +1,4 @@ +import asyncio import logging import re import urllib.parse @@ -179,6 +180,7 @@ def __init__(self, lspClient: LspClient): self._tokenizedDocCache: Dict[str, TokenizedDocument] = {} self._workspaceFilePatterns: List[str] = None self._workspaceFilePatternsRegex: list[re.Pattern] = 
None + self._deletePendingPaths = [] @property def lspClient(self): @@ -349,7 +351,12 @@ async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch] defPath = self.pathFromUri(d.uri) if not self.filterFile(defPath): continue - defsDoc = await self.getTokenizedDocument(defPath) + defsDoc = None + try: + defsDoc = await self.getTokenizedDocument(defPath) + except Exception as ex: + _logger.error("%s", ex) + continue defNode = defsDoc.tokenAt(d.range.start.line, d.range.start.character) defScope = defsDoc.scopeAt(d.range.start.line, d.range.start.character) if not defNode: @@ -378,14 +385,28 @@ async def deleteFile(self, fileName: str): self.removeDocumentCache(path) if not path.exists: return False - doc = TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") + await self.waitForFileSystem(relaxed=True) + await self.deleteFileCore(path) + self._deletePendingPaths.append(path) + return True + + async def deleteFileCore(self, filePath: Path): + doc = TextDocument(TextDocument.fileNameToUri(str(filePath)), self.inferLanguageId(filePath), 1, "") self._lspClient.server.textDocumentDidOpen(doc) # Empty the file and notify language server. 
self._lspClient.server.textDocumentDidChange(doc.uri, 2, [TextDocumentContentChangeEvent("")]) - path.unlink() + filePath.unlink() self._lspClient.server.textDocumentDidSave(doc.uri) await self.closeDocument(doc.uri) + async def waitForFileSystem(self, relaxed: bool = False): + if not relaxed or len(self._deletePendingPaths) > 1000: + for p in self._deletePendingPaths: + p: Path + if p.exists(): + await asyncio.sleep(0.1) + self._deletePendingPaths.clear() + async def modifyFile(self, fileName: str, newContent: str): """ Modify a file's content, notifying the language server, as if the file @@ -396,23 +417,23 @@ async def modifyFile(self, fileName: str, newContent: str): path = Path(fileName).resolve() self.removeDocumentCache(path) try: - originalFileExists = path.exists() - doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) \ - if originalFileExists \ - else TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") - try: - await self.modifyFileCore(path, doc, newContent) - _logger.info("%s %s.", "Modified " if originalFileExists else "Created", path) - return doc.text - finally: - await self.closeDocument(doc.uri) + await self.modifyFileCore(path, newContent) except Exception as ex: raise Exception("Cannot modify {0}.".format(path)) from ex - async def modifyFileCore(self, filePath: PurePath, originalDocument: TextDocument, newContent: str): - self._lspClient.server.textDocumentDidOpen(originalDocument) - self._lspClient.server.textDocumentDidChange( - originalDocument.uri, 2, [TextDocumentContentChangeEvent(newContent)]) - with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: - f.write(newContent) - self._lspClient.server.textDocumentDidSave(originalDocument.uri) + async def modifyFileCore(self, filePath: Path, newContent: str): + originalFileExists = filePath.exists() + doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) \ + if originalFileExists \ + else 
TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") + try: + self._lspClient.server.textDocumentDidOpen(doc) + self._lspClient.server.textDocumentDidChange( + doc.uri, 2, [TextDocumentContentChangeEvent(newContent)]) + with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: + f.write(newContent) + self._lspClient.server.textDocumentDidSave(doc.uri) + _logger.info("%s %s.", "Modified " if originalFileExists else "Created", path) + return doc.text + finally: + await self.closeDocument(doc.uri) diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py index 345c7ee8435..efc1a5c59b6 100644 --- a/persper/analytics/lsp_graph_server/callgraph/manager.py +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -39,15 +39,17 @@ async def buildGraph(self, fileNames: Union[str, Iterable[str]] = None, globPatt globPattern: `str` or `str[]` containing the glob pattern of the files from which to build the call graph branches. 
""" - counter = 0 - + oldBranchesCount = len(self._graph.items) + branchCounter = oldBranchesCount + fileCounter = 0 + await self._builder.waitForFileSystem() def pushBranch(branch): - nonlocal counter + nonlocal branchCounter try: self._graph.add(branch) - counter += 1 - if counter % 1000 == 0: - _logger.info("Already added %d branches.", counter) + branchCounter = len(self._graph.items) - oldBranchesCount + if branchCounter % 2000 == 0: + _logger.info("Already added %d branches.", branchCounter) except ValueError as ex: _logger.debug("%s Branch: %s", ex, branch) @@ -58,12 +60,16 @@ def pushBranch(branch): sfn = str(fn) if not path.exists(sfn): continue + fileCounter += 1 async for b in self._builder.buildCallGraphInFile(sfn): pushBranch(b) if globPattern or not fileNames: async for b in self._builder.buildCallGraphInFiles(globPattern): - pushBranch(b) - _logger.info("Added %d branches.", counter) + pushBranch(b) + if fileNames and not globPattern: + _logger.info("Added %d branches (-> %d) from %d files.", branchCounter, len(self._graph.items), fileCounter) + else: + _logger.info("Added %d branches (-> %d).", len(self._graph.items), branchCounter) def removeByFiles(self, fileNames: Iterable[str]) -> Iterable[Path]: """ diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 8a7425ee45b..0353e273300 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -2,6 +2,7 @@ ccls client-side LSP support. 
""" import logging +import os from asyncio import sleep from pathlib import Path, PurePath from typing import List, Union @@ -132,7 +133,8 @@ async def openDocument(self, textDoc: TextDocument): return False raise - async def modifyFileCore(self, filePath: PurePath, originalDocument: TextDocument, newContent: str): + async def modifyFileCore(self, filePath: Path, newContent: str): + os.makedirs(str(filePath.parent), exist_ok=True) with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: f.write(newContent) @@ -173,10 +175,11 @@ async def startLspClient(self): self._callGraphBuilder = CclsCallGraphBuilder(self._lspClient) self._callGraphBuilder.workspaceFilePatterns = [ str(self._workspaceRoot.joinpath("**/*.[Hh]")), - str(self._workspaceRoot.joinpath("**/*.[Hh]pp")), + str(self._workspaceRoot.joinpath("**/*.[Hh][Hh]")), + str(self._workspaceRoot.joinpath("**/*.[Hh][Pp][Pp]")), str(self._workspaceRoot.joinpath("**/*.[Cc]")), - str(self._workspaceRoot.joinpath("**/*.[Cc]c")), - str(self._workspaceRoot.joinpath("**/*.[Cc]pp")), - str(self._workspaceRoot.joinpath("**/*.[Cc]xx")) + str(self._workspaceRoot.joinpath("**/*.[Cc][Cc]")), + str(self._workspaceRoot.joinpath("**/*.[Cc][Pp][Pp]")), + str(self._workspaceRoot.joinpath("**/*.[Cc][Xx][Xx]")) ] self._callGraphManager = CallGraphManager(self._callGraphBuilder, self._callGraph) From 665f1b09a0be09fd2324e9e3eb72a3a68352598c Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 10 Dec 2018 22:35:51 +0800 Subject: [PATCH 14/84] Add notebook generating call graph from first 100 commits of ccls. 
--- notebooks/lsp-ccls-ccls.ipynb | 1760 +++++++++++++++++++++++++++++++++ 1 file changed, 1760 insertions(+) create mode 100644 notebooks/lsp-ccls-ccls.ipynb diff --git a/notebooks/lsp-ccls-ccls.ipynb b/notebooks/lsp-ccls-ccls.ipynb new file mode 100644 index 00000000000..1e58280f5db --- /dev/null +++ b/notebooks/lsp-ccls-ccls.ipynb @@ -0,0 +1,1760 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "\n", + "logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s',\n", + " level=logging.CRITICAL)\n", + "logging.getLogger(\"persper.analytics.lsp_graph_server.callgraph.manager\").setLevel(logging.INFO)\n", + "logging.getLogger('asyncio').setLevel(logging.WARNING)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp5twgn11z\n", + "----- Overview ------\n", + "# of commits on master: 108\n", + "# of commits on branch: 0\n", + "----- No.1 5dbde940b60cab1d6a4cd45a90aa45959f7ad84b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:41:27,270 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 503 branches (-> 503) from 31 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.2 0dc27bd3acfc0526ac713d15d32c8d784297b6a9 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:41:50,247 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 932 branches (-> 992) from 34 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.3 0718435400ed2a4ff1ae958d565dc6de5b695ab4 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 
21:42:14,616 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 627 branches (-> 782) from 40 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.4 46cec6f08355cb9a985f8fce657c35fbd09a4f9a on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:42:34,955 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 524 branches (-> 806) from 41 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.5 a8cdadc201620993ece0be8d8e2b3d200448701b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:42:57,719 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 679 branches (-> 954) from 39 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.6 e54d70e464e93daa3058e6a34584f1d073609efa on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:43:17,536 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 639 branches (-> 931) from 27 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.7 41f2a75de02707b2972a654b48c86955b60d9385 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:43:37,840 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 698 branches (-> 993) from 32 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.8 fd1d8c8785a942b3efcf4e88a4ad771c344fa347 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:43:55,812 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 783 branches (-> 1099) from 22 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.9 7f6354f9c8810321337ef705b132684497f45fa5 on main 
-----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:44:09,742 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 592 branches (-> 1145) from 6 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.10 ca9918b8c4284d432cbf224c6dcac7dbe170ce7b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:44:27,726 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 766 branches (-> 1178) from 23 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.11 4ed00a32622b64df920c342251a417bc8ebd27e9 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:44:44,874 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 706 branches (-> 1265) from 5 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.12 e25d54c8b941d1074383333b2542f4740e668eae on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:45:04,449 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 716 branches (-> 1260) from 27 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.13 e637de145af8aff3cbb0f0761928c6496eb9bed2 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:45:18,873 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 528 branches (-> 1273) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.14 161aab3a09ba7b3cf30d540d7ed12676750e732e on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:45:32,745 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 477 branches (-> 1312) from 2 files.\n" + ] + }, + { + "name": "stdout", 
+ "output_type": "stream", + "text": [ + "----- No.15 8b534175f5b7368d6bf8a1a0a3cdd98b6aacef89 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:45:49,112 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 799 branches (-> 1374) from 20 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.16 d87d74083caa18aa990f3f9e0ca60a3d72f45067 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:46:23,367 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1436 branches (-> 1658) from 50 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.17 7701822aa9cfcfd6f82db2b8652f24f3c477cc66 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:46:51,628 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1181 branches (-> 1709) from 34 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.18 3cd39ae91ab53587af59abefd0431a49dd2d008b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:47:16,061 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 724 branches (-> 1733) from 2 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.19 d3b57ac30108251a3334058b1b9c4dc0a7bd894b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:47:46,333 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1197 branches (-> 1734) from 34 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.20 d01eb6b86f22e7aad01cdcc53f3c917e26329156 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:48:12,985 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1153 branches (-> 1726) from 20 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.21 d26cb81854526c1ddaf95d80ed5b8c984851e9be on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:48:38,870 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1194 branches (-> 1789) from 19 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.22 6970d60dca9454227a3d85006193d625f63e0603 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:49:01,523 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 293 branches (-> 1379) from 1 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.23 da6fbf7c5a750036a2c5013a6edadc638d789aca on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:49:03,136 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 549 branches (-> 1437) from 18 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.24 657260eeabf7b933dce952ef31c62a95c933d883 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:49:25,825 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 304 branches (-> 1448) from 5 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.25 0dabbb30932ea04a718d719ae3b66822a84d9647 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:50:01,095 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1077 branches (-> 1547) from 58 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.26 43072452e5666869777cddda80b4a36e5f373c86 on main 
-----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:50:25,027 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 914 branches (-> 1609) from 18 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.27 e78945a80f1908cf5c759f47bbc35e9ecaccf244 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:50:53,501 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 689 branches (-> 1678) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.28 04412f056f6b7f0c19585a14f58c4a4a699a6d21 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:51:21,800 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 591 branches (-> 1668) from 11 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.29 561f747133c0f045da56076f1b60680508580cf9 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:51:41,283 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 654 branches (-> 1730) from 5 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.30 f3edc6e2f010cf0eabe5605549b8e33e22891af4 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:51:59,023 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 637 branches (-> 1729) from 7 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.31 ab7138bd915c9d0b0ea2585e6d8cafaa8990d30c on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:52:17,076 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 572 branches (-> 1743) from 7 files.\n" + ] + }, + { + "name": "stdout", 
+ "output_type": "stream", + "text": [ + "----- No.32 7c70d7fafde186f9defd58590bee0b3a7a96124b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:52:55,411 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1517 branches (-> 1835) from 76 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.33 f067f6dcc428ca53d4612b7661443d27a6a7f83a on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:53:16,946 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1325 branches (-> 1843) from 24 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.34 ef1ff80a58232aa0efe3d1c7e32240bf7b9a29a3 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:53:54,763 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1527 branches (-> 1845) from 75 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.35 6b95f51a25849122a7e0093ea2e243c9b7b64cfb on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:54:20,054 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1416 branches (-> 1847) from 33 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.36 5f0f290d1876ff13259709f62bcbeff57ea1334c on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:54:41,048 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 639 branches (-> 1857) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.37 a7f5231e4cc4bb8ea676ef93851b9edaeeb13123 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:55:20,853 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1545 branches (-> 1863) from 81 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.38 6a95d2f46f382a460ece15c976926967247d4ebb on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:55:54,458 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1556 branches (-> 1890) from 68 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.39 2932b5d41b531fd51b0758b0239fecda2d286790 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:56:14,963 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 732 branches (-> 1925) from 4 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.40 b33bd54922f74a92ca0d8f82301ffbeae3d95c33 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:56:35,178 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 696 branches (-> 1926) from 4 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.41 bebb2306038702e0ee600926e9f4ddff66a9ce17 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:56:55,307 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 698 branches (-> 1928) from 3 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.42 9b351ce52faa76aab2564e9fba5f9fd3394344f6 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:57:16,635 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 876 branches (-> 1931) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.43 683cac602e7a6952abad2c280f7662edd3d7e268 on main 
-----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:57:55,002 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 890 branches (-> 2051) from 6 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.44 358b4434c2dddaaccfff8400ce0c5497629b605a on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:58:39,245 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 855 branches (-> 2016) from 7 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.45 ce4c2232d7599403388a945a53968a8ff5c2cb4c on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:59:05,386 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 739 branches (-> 1810) from 19 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.46 d867d962d82abae8b40ed5e433a02646a5aacbe0 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:59:28,732 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 903 branches (-> 2049) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.47 b165cfa59d03fec0776b6b0b28f4065a587086a0 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 21:59:49,121 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 735 branches (-> 1759) from 11 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.48 0f17a49d40aacaf29a6d7eaefe3762072c0b30f7 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:00:05,793 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 502 branches (-> 1702) from 5 files.\n" + ] + }, + { + "name": "stdout", 
+ "output_type": "stream", + "text": [ + "----- No.49 264b687e4312fa022ebca264945af344c12b78ef on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:00:21,326 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 565 branches (-> 1615) from 6 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.50 5faf9d1f6b54d89113eb0b0aac5ccb798e630fe8 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:00:37,524 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 483 branches (-> 1635) from 6 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.51 d59b7c7379c4fa2653b388dcd988d58c439f2e9b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:00:58,293 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 484 branches (-> 1633) from 7 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.52 f3f72a0dfabb6ac2dead437e829960d2910c65a0 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:01:26,533 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1397 branches (-> 1754) from 50 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.53 b7d9a0f815f825d0a4e6993395b15e9a0fba51c7 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:01:56,839 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1317 branches (-> 2476) from 13 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.54 243ed8dfa523092e77ee8c6341ffffcd623fe93d on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:02:24,450 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1277 branches (-> 2480) from 8 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.55 f3aa91d8dbf457e09897b52e70d3a80fa4c305f5 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:02:44,312 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1031 branches (-> 2649) from 3 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.56 aaa3542670a9e4ada6973d3077df8d73d9e0ab40 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:03:17,939 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1117 branches (-> 2335) from 8 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.57 bc8daee0654c4daa1ebc2b29333248678f34f70a on main -----\n", + "----- No.58 18aa28bdea347d0af19504a08e71cc42d1fcdfeb on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:03:49,886 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1239 branches (-> 2457) from 8 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.59 aa677de886e23ebff3e4d6327f98730f5bed9cd2 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:04:18,490 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1563 branches (-> 2781) from 8 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.60 430b9ef61bc4a106dba0e6b685b1891d351f17f3 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:04:46,238 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1386 branches (-> 2604) from 8 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + 
"text": [ + "----- No.61 6df9f61b56a0e3fe867f9af93530a6ef4eed55c4 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:05:13,600 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1454 branches (-> 2587) from 14 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.62 c1db1766c9139ffb09525bbebc8d98a1500bdbef on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:05:25,628 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 647 branches (-> 2547) from 3 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.63 7192db6e32e8ea7828ef52a50ef3d66761ac535a on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:05:52,501 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1354 branches (-> 2535) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.64 a37d402ce2bcb5155fae5d1dd558b6a2d84ce4c2 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:06:17,437 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1352 branches (-> 2535) from 8 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.65 05af433b5a323f0cd626aa810738f0326595005f on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:06:46,102 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1339 branches (-> 2520) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.66 1f4f72013654f3b03e19f95e1612281527386256 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:07:18,293 INFO [persper.analytics.lsp_graph_server.callgraph.manager] 
Added 1371 branches (-> 2520) from 11 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.67 2fc419faa302d51101c8b71b5ecdd1c8c2de7f89 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:08:00,640 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1374 branches (-> 2552) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.68 0738b8f57aa4abfcd43b517b35338a51db21d75d on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:08:40,461 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1436 branches (-> 3000) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.69 88f2a3541a0bcb551da8c00cf80bba29f3245467 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:09:18,285 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1364 branches (-> 2975) from 12 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.70 dad7fcb5a3a5535744f9a52e9cce9a482ad9669e on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:09:52,092 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1292 branches (-> 3091) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.71 6ffca03d6f7baf280996e02f8b78f9a886e3f80e on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:10:07,000 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 587 branches (-> 3096) from 3 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.72 94383d589b3fcb5786290c50a9f2e08402e0e562 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": 
"stream", + "text": [ + "2018-12-10 22:10:38,529 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1329 branches (-> 3077) from 7 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.73 b3a544e880547a9434c3e7caad526759dc6bdef0 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:10:48,025 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 436 branches (-> 3069) from 4 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.74 253414f29b9c1d3e0dc06c807b79591fe6daad85 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:10:57,375 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 447 branches (-> 3080) from 4 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.75 dd8fb070fd5b1bae7f7e1d7dd03131e208e107e5 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:11:06,758 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 505 branches (-> 3138) from 4 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.76 15b5a03a08799659d54b58bf0f7b49d3ae5ed55b on main -----\n", + "----- No.77 2a08552265402719f5bcc8c260794b16734adc2d on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:11:44,347 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 977 branches (-> 3154) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.78 8fb0fb816c45ea7fad3ac4732b72d42401d184b4 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:12:47,629 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:12:47,631 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 2000 branches (-> 3133) from 15 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.79 bf98dc56fb15f4d47c6f9faf245dc1d84b5907a2 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:13:43,776 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:14:01,282 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2329 branches (-> 3396) from 21 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.80 d403d7ad963898cfb1bd9d46220ab57359c3658e on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:14:01,793 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 5 branches (-> 3401) from 2 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.81 8e3615240661365c9f6650db08246ef6dd3886ae on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:14:51,701 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:15:00,664 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2521 branches (-> 3589) from 33 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.82 e06f9472c1012235de02caf6e2d5b41688b72c85 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:15:50,533 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:15:55,018 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2220 branches (-> 3456) from 16 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.83 f6967eee48f48c2b100538b40a58e3cb53829d3e on 
main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:16:43,888 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:16:45,245 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2114 branches (-> 3463) from 15 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.84 f7e2b200591372495d93b0e56d00bce3eafe89c9 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:17:32,833 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:17:34,234 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2119 branches (-> 3469) from 12 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.85 49c6f7787a4363d4a43b1197d9a66d995da2b7d0 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:18:22,513 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:18:23,911 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2123 branches (-> 3475) from 13 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.86 36f69e61a9b248097f7f4330d69568d54129a7db on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:19:12,959 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:19:14,606 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2132 branches (-> 3490) from 11 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.87 852a4218ba60770e6effa149a3ccfecd997bd2f0 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 
22:19:37,302 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1106 branches (-> 3497) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.88 2db753ff92279f7e5a1901e9e5bc27a8d6d5eb29 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:20:24,758 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:20:26,389 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2132 branches (-> 3497) from 11 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.89 7a160a526975068509461803ee99db8a2d2a4c4a on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:21:16,151 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1616 branches (-> 3592) from 13 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.90 729264bb34a2e885409a0d2c19170f72d809c327 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:22:28,280 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1935 branches (-> 3076) from 15 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.91 b1a69c2ec3f96d668b4e815adf83641289444018 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:22:34,578 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 13 branches (-> 3081) from 2 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.92 94b344dbe3f79340e5d18ed7ddc3bed6fcab8a15 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:23:06,224 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1306 branches (-> 3124) from 71 files.\n" + 
] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.93 6519dc1b8de9d3a4fa53995753ca8b6fe7a1ef86 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:23:22,658 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1009 branches (-> 3127) from 67 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.94 4e2f24ac174533d295799a6a4958da6a564f7462 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:23:36,244 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 895 branches (-> 3131) from 45 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.95 9b89bfc905dd90205a6c3a901bdf5f1a35e535ba on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:24:21,461 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1715 branches (-> 3124) from 10 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.96 afc6db80ddebda8813015a645854de6422eef32e on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:24:31,735 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3124 branches (-> 3124) from 1 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.97 1508ac85d8cace7bdf9fdf456753a6024959008b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:25:31,069 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:25:35,851 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2264 branches (-> 3675) from 13 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.98 
7c55502fe8c4cadd24b507fe1053165f4b4c55fb on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:25:40,217 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 144 branches (-> 3692) from 3 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.99 68d9002ecd9ac617448d25e0beeb1593c7e2df72 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:25:52,830 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 679 branches (-> 3773) from 4 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.100 38fb4a4f1da53062cbaa881534f612cd78f8014b on main -----\n", + "------ Used time: 2675.164 -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:26:08,446 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 553 branches (-> 3797) from 5 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.101 8a13acd3b867dc3cf4455f3387123ea3df89725b on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:26:09,367 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3797 branches (-> 3797) from 2 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.102 f3c8500fa9c7422ffd693c02a2bb7c9fb01b3bc5 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:27:00,632 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:27:20,857 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2494 branches (-> 3723) from 16 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.103 cc4d49794d8960b82208bfa2ec8e4f4a9f9548c9 on main -----\n" + ] + }, + { + "name": 
"stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:28:11,839 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:28:33,146 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2624 branches (-> 3807) from 17 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.104 fdd798f995ce3f6e66b1c3d92ad6b9dedc3218ab on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:29:19,790 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:29:41,494 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2670 branches (-> 3856) from 15 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.105 1a6fd858c67110e9d88a1012db973875cccd8d2f on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:30:24,167 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1837 branches (-> 3775) from 9 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.106 0b44f72ed8c0091862a1f9f0f782a5180d0a82ff on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:31:10,697 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:31:31,397 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2519 branches (-> 3810) from 15 files.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.107 c079ab45b3bdda145054e14aeb61c332c2dbf3cf on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:31:44,583 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 772 branches (-> 3875) from 3 files.\n" + ] + }, + { + "name": "stdout", + 
"output_type": "stream", + "text": [ + "----- No.108 ebd467d31b10dbf5784f160533d747d434bbc764 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-10 22:32:32,368 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-10 22:32:52,709 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2489 branches (-> 3875) from 13 files.\n" + ] + } + ], + "source": [ + "from persper.analytics.analyzer import Analyzer\n", + "from persper.analytics.lsp_graph_server.ccls import CclsGraphServer\n", + "from tempfile import mkdtemp\n", + "\n", + "dumpLogs = False\n", + "workspaceRoot = mkdtemp()\n", + "print(\"Workspace root: \", workspaceRoot)\n", + "G = None\n", + "async with CclsGraphServer(workspaceRoot, cacheRoot=\"./.ccls-cache\",\n", + " languageServerCommand=\"../bin/ccls\" + (\"-log-file=ccls.log\" if dumpLogs else \"\"),\n", + " dumpLogs=dumpLogs) as graphServer:\n", + " analyzer = Analyzer(\"../../testrepos/ccls\", graphServer)\n", + " graphServer.reset_graph()\n", + " await analyzer.analyze()\n", + " G = analyzer.get_graph()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Nodes: 683\n", + "Edges: 3250\n" + ] + } + ], + "source": [ + "print(\"Nodes:\", len(G.nodes()))\n", + "print(\"Edges:\", len(G.edges()))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset) | namespace clang {}\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const 
std::string &filepath, unsigned int line, unsigned int column)\n", + "namespace clang {} | std::string clang::SourceLocation::get_path()\n", + "namespace clang {} | clang::Offset clang::SourceLocation::get_offset()\n", + "namespace clang {} | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", + "namespace clang {} | clang::CompilationDatabase::CompilationDatabase(const int &project_path)\n", + "namespace clang {} | namespace clang {}\n", + "namespace clang {} | clang::Tokens::Tokens(int &cx_tu, const clang::SourceRange &range)\n", + "namespace clang {} | int clang::CompileCommand::get_command()\n", + "namespace clang {} | int clang::CompileCommand::get_command_as_args()\n", + "namespace clang {} | std::string clang::to_string(int cx_string)\n", + "namespace clang {} | clang::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | static unsigned int clang::TranslationUnit::DefaultFlags()\n", + "namespace clang {} | int clang::TranslationUnit::ReparseTranslationUnit(const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | void clang::TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)\n", + "namespace clang {} | int clang::TranslationUnit::get_code_completions(const std::string &buffer, unsigned int line_number, unsigned int column)\n", + "namespace clang {} | int clang::TranslationUnit::get_diagnostics()\n", + "namespace clang {} | int clang::TranslationUnit::get_tokens(unsigned int start_offset, unsigned int end_offset)\n", + "namespace clang {} | int clang::TranslationUnit::get_tokens(unsigned int start_line, unsigned int start_column, unsigned int end_line, unsigned int end_column)\n", + "namespace clang {} | int clang::TranslationUnit::get_cursor(std::string path, 
unsigned int offset)\n", + "namespace clang {} | int clang::TranslationUnit::get_cursor(std::string path, unsigned int line, unsigned int column)\n", + "namespace clang {} | std::string clang::Cursor::Type::get_spelling() const\n", + "namespace clang {} | clang::Cursor::Type clang::Cursor::Type::get_result() const\n", + "namespace clang {} | clang::Cursor::Cursor()\n", + "namespace clang {} | clang::Cursor::Cursor::Cursor(const int &other)\n", + "namespace clang {} | int Cursor::get_kind() const\n", + "namespace clang {} | clang::Cursor::Type clang::Cursor::get_type() const\n", + "namespace clang {} | clang::SourceLocation clang::Cursor::get_source_location() const\n", + "namespace clang {} | int Cursor::get_source_range() const\n", + "namespace clang {} | std::string clang::Cursor::get_spelling() const\n", + "namespace clang {} | std::string clang::Cursor::get_display_name() const\n", + "namespace clang {} | std::string clang::Cursor::get_usr() const\n", + "namespace clang {} | clang::Cursor clang::Cursor::get_referenced() const\n", + "namespace clang {} | clang::Cursor clang::Cursor::get_canonical() const\n", + "namespace clang {} | clang::Cursor clang::Cursor::get_definition() const\n", + "namespace clang {} | clang::Cursor clang::Cursor::get_semantic_parent() const\n", + "namespace clang {} | int Cursor::get_arguments() const\n", + "namespace clang {} | bool clang::Cursor::is_valid_kind() const\n", + "namespace clang {} | std::string clang::Cursor::get_type_description() const\n", + "namespace clang {} | std::string clang::Cursor::get_brief_comments() const\n", + "namespace clang {} | clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column)\n", + "namespace clang {} | clang::CompletionString::CompletionString(const int &cx_completion_sting)\n", + "namespace clang {} | int clang::CodeCompleteResults::get(unsigned int i) const\n", + "namespace clang {} | unsigned int 
clang::CodeCompleteResults::size() const\n", + "namespace clang {} | int clang::CodeCompleteResults::get_usr() const\n", + "namespace clang {} | clang::Index::Index(int excludeDeclarationsFromPCH, int displayDiagnostics)\n", + "namespace clang {} | int clang::SourceRange::get_offsets()\n", + "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | void TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)\n", + "namespace clang {} | int TranslationUnit::get_code_completions(const std::string &buffer, unsigned int line_number, unsigned int column)\n", + "namespace clang {} | int TranslationUnit::get_diagnostics()\n", + "namespace clang {} | int TranslationUnit::get_tokens(unsigned int start_offset, unsigned int end_offset)\n", + "namespace clang {} | int TranslationUnit::get_tokens(unsigned int start_line, unsigned int start_column, unsigned int end_line, unsigned int end_column)\n", + "namespace clang {} | int TranslationUnit::document_cursor() const\n", + "namespace clang {} | int TranslationUnit::get_cursor(std::string path, unsigned int offset)\n", + "namespace clang {} | int TranslationUnit::get_cursor(std::string path, unsigned int line, unsigned int column)\n", + "namespace clang {} | std::string clang::Type::get_spelling() const\n", + "namespace clang {} | clang::Type clang::Type::get_result() const\n", + "namespace clang {} | clang::Type clang::Cursor::get_type() const\n", + "namespace clang {} | bool clang::Cursor::is_definition() const\n", + "namespace clang {} | std::string clang::Cursor::get_comments() const\n", + "namespace clang {} | clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic)\n", + "namespace clang {} | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int 
severity)\n", + "namespace clang {} | struct TypeDef\n", + "namespace clang {} | std::string clang::ToString(int cx_string)\n", + "namespace clang {} | clang::Type::Type()\n", + "namespace clang {} | clang::Type::Type::Type(const int &other)\n", + "namespace clang {} | std::string clang::Type::get_usr() const\n", + "namespace clang {} | clang::Type clang::Type::get_return_type() const\n", + "namespace clang {} | int Type::get_arguments() const\n", + "namespace clang {} | std::string clang::Cursor::ToString() const\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(const int &cx_location)\n", + "namespace clang {} | std::string clang::SourceLocation::ToString() const\n", + "namespace clang {} | std::string clang::Cursor::evaluate() const\n", + "namespace clang {} | clang::Type clang::Type::strip_qualifiers() const\n", + "namespace clang {} | bool clang::Type::is_fundamental() const\n", + "namespace clang {} | int Type::get_template_arguments() const\n", + "namespace clang {} | clang::SourceLocation::SourceLocation()\n", + "namespace clang {} | class Foo\n", + "namespace clang {} | clang::Cursor clang::Cursor::template_specialization_to_template_definition() const\n", + "namespace clang {} | struct Foo {}\n", + "namespace clang {} | class Foo {}\n", + "namespace clang {} | clang::CompileCommand::CompileCommand::CompileCommand(const int &command)\n", + "namespace clang {} | int CompileCommand::get_command() const\n", + "namespace clang {} | int CompileCommand::get_command_as_args() const\n", + "namespace clang {} | struct SymbolIdx {}\n", + "namespace clang {} | explicit clang::CompilationDatabase::CompilationDatabase(const std::string &project_path)\n", + "namespace clang {} | clang::CompileCommands::CompileCommands(const clang::CompilationDatabase &db)\n", + "namespace clang {} | int clang::CompileCommands::get_commands()\n", + "namespace clang {} | struct QueryableDatabase {}\n", + "namespace clang {} | struct Task {}\n", + "namespace 
clang {} | int Type::get_declaration() const\n", + "namespace clang {} | struct IpcRegistry {}\n", + "namespace clang {} | struct QueryableLocation {}\n", + "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column) | namespace clang {}\n", + "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column) | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", + "std::string clang::SourceLocation::get_path() | namespace clang {}\n", + "std::string clang::SourceLocation::get_path() | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", + "void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset) | namespace clang {}\n", + "...\n" + ] + } + ], + "source": [ + "import itertools\n", + "for e in itertools.islice(G.edges(), 100):\n", + " print(e[0], \" | \", e[1])\n", + "if len(G.edges()) > 100:\n", + " print(\"...\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "# un-pickle test\n", + "import pickle\n", + "loaded = pickle.load(open(\"repos-finished-0.pickle\", \"rb\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, int 
&rhs)\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n" + ] + } + ], + "source": [ + "for e in itertools.islice(loaded.get_graph().edges(), 10):\n", + " print(e[0], \" | \", e[1])\n", + "if len(loaded.get_graph().edges()) > 100:\n", + " print(\"...\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.0" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From c0f9fab1b2286c8974eb1d9c58cd26d6e718df16 Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 25 Dec 2018 22:58:10 +0800 Subject: [PATCH 15/84] Update cpp test repo to contain file moving scenario. 
--- test/cpp_test_repo/B/{Utility.cpp => Utility-1.cpp} | 0 test/cpp_test_repo/B/main.cpp | 2 ++ 2 files changed, 2 insertions(+) rename test/cpp_test_repo/B/{Utility.cpp => Utility-1.cpp} (100%) diff --git a/test/cpp_test_repo/B/Utility.cpp b/test/cpp_test_repo/B/Utility-1.cpp similarity index 100% rename from test/cpp_test_repo/B/Utility.cpp rename to test/cpp_test_repo/B/Utility-1.cpp diff --git a/test/cpp_test_repo/B/main.cpp b/test/cpp_test_repo/B/main.cpp index 6542b044ad0..6d9f1638733 100644 --- a/test/cpp_test_repo/B/main.cpp +++ b/test/cpp_test_repo/B/main.cpp @@ -11,5 +11,7 @@ int main(int argc, char* argv[]) cout << parser.GetBool("testBool", false) << endl; cout << parser.GetDouble("textDouble", 1.23) << endl; cout << parser.GetString("rawValue", "test") << endl; + exception testException("message"); + ReportException(testException); return 0; } From 0ee80ef2381f5f85730dce9a81be47726f3365c2 Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 25 Dec 2018 22:59:20 +0800 Subject: [PATCH 16/84] Fix incorrect waiting in stopLspClient. Popen.wait throws TimeoutExpired instead of returning None. 
--- persper/analytics/lsp_graph_server/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 86b9719f2a5..2fd8b663e7b 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -109,7 +109,7 @@ def reset_graph(self): def filter_file(self, filename): filePath = self._workspaceRoot.joinpath(filename).resolve() - # print("Filter: ", filePath, self._callGraphBuilder.filterFile(str(filePath))) + # _logger.info("Filter: %s -> %s", filePath, self._callGraphBuilder.filterFile(str(filePath))) return self._callGraphBuilder.filterFile(str(filePath)) def config(self, param: dict): @@ -147,10 +147,10 @@ async def stopLspClient(self): _logger.info("Shutting down language server...") await asyncio.wait_for(self._lspClient.server.shutdown(), 10) self._lspClient.server.exit() - exitCode = self._lspServerProc.wait(10) - if exitCode != None: + try: + exitCode = self._lspServerProc.wait(10) _logger.info("Language server %d exited with code: %s.", self._lspServerProc.pid, exitCode) - else: + except subprocess.TimeoutExpired: self._lspServerProc.kill() _logger.warning("Killed language server %d.", self._lspServerProc.pid) self._lspServerProc = None From 0b8c096ac5f9937a108985fd7d972e57ff534771 Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 25 Dec 2018 23:10:40 +0800 Subject: [PATCH 17/84] Use DidChangeWatchedFiles for file system notifications. Wait for ccls fix on handling deleted/moved files. There should no more FileNotFound errors in buildCallGraphInFile. 
--- .../lsp_graph_server/callgraph/builder.py | 39 +++++++--------- persper/analytics/lsp_graph_server/ccls.py | 37 +++++++++------ .../languageclient/lspclient.py | 5 ++- .../languageclient/lspcontract.py | 45 ++++++++++++++++++- .../languageclient/lspserver.py | 5 ++- 5 files changed, 93 insertions(+), 38 deletions(-) diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index dc527e22609..9abfa2b8a3a 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -1,5 +1,6 @@ import asyncio import logging +import os import re import urllib.parse from abc import ABC, abstractclassmethod @@ -16,7 +17,7 @@ from persper.analytics.lsp_graph_server.languageclient.lspclient import LspClient from persper.analytics.lsp_graph_server.languageclient.lspcontract import \ DocumentSymbol, Location, Position, SymbolInformation, SymbolKind, \ - TextDocument, TextDocumentContentChangeEvent + TextDocument, TextDocumentContentChangeEvent, FileEvent, FileChangeType from . 
import CallGraphBranch, CallGraphNode, CallGraphScope _logger = logging.getLogger(__name__) @@ -352,11 +353,7 @@ async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch] if not self.filterFile(defPath): continue defsDoc = None - try: - defsDoc = await self.getTokenizedDocument(defPath) - except Exception as ex: - _logger.error("%s", ex) - continue + defsDoc = await self.getTokenizedDocument(defPath) defNode = defsDoc.tokenAt(d.range.start.line, d.range.start.character) defScope = defsDoc.scopeAt(d.range.start.line, d.range.start.character) if not defNode: @@ -400,11 +397,15 @@ async def deleteFileCore(self, filePath: Path): await self.closeDocument(doc.uri) async def waitForFileSystem(self, relaxed: bool = False): - if not relaxed or len(self._deletePendingPaths) > 1000: + if not relaxed and len(self._deletePendingPaths) > 0 or len(self._deletePendingPaths) > 100: for p in self._deletePendingPaths: p: Path if p.exists(): await asyncio.sleep(0.1) + else: + _logger.info("Confirm deleted: %s", p) + self._lspClient.server.workspaceDidChangeWatchedFiles( + [FileEvent(TextDocument.fileNameToUri(p), FileChangeType.Deleted) for p in self._deletePendingPaths]) self._deletePendingPaths.clear() async def modifyFile(self, fileName: str, newContent: str): @@ -422,18 +423,12 @@ async def modifyFile(self, fileName: str, newContent: str): raise Exception("Cannot modify {0}.".format(path)) from ex async def modifyFileCore(self, filePath: Path, newContent: str): - originalFileExists = filePath.exists() - doc = TextDocument.loadFile(str(path), self.inferLanguageId(path), 1) \ - if originalFileExists \ - else TextDocument(TextDocument.fileNameToUri(str(path)), self.inferLanguageId(path), 1, "") - try: - self._lspClient.server.textDocumentDidOpen(doc) - self._lspClient.server.textDocumentDidChange( - doc.uri, 2, [TextDocumentContentChangeEvent(newContent)]) - with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: - f.write(newContent) - 
self._lspClient.server.textDocumentDidSave(doc.uri) - _logger.info("%s %s.", "Modified " if originalFileExists else "Created", path) - return doc.text - finally: - await self.closeDocument(doc.uri) + os.makedirs(str(filePath.parent), exist_ok=True) + prevFileExists = filePath.exists() + with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: + f.write(newContent) + uri = TextDocument.fileNameToUri(filePath) + self._lspClient.server.workspaceDidChangeWatchedFiles( + [FileEvent(uri, + FileChangeType.Changed if prevFileExists else FileChangeType.Created)]) + _logger.info("Modified %s.", filePath) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 0353e273300..b0c945904f9 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -17,22 +17,38 @@ from .callgraph.manager import CallGraphManager from .fileparsers.CPP14Lexer import CPP14Lexer from .languageclient.lspclient import LspClient -from .languageclient.lspcontract import TextDocument, TextDocumentContentChangeEvent +from .languageclient.lspcontract import LspContractObject, TextDocument, TextDocumentContentChangeEvent from .languageclient.lspserver import LspServerStub _logger = logging.getLogger(__name__) +class CclsInfo(LspContractObject): + def __init__(self, pendingIndexRequests: int, postIndexWorkItems:int, projectEntries: int): + self.pendingIndexRequests = pendingIndexRequests + self.postIndexWorkItems = postIndexWorkItems + self.projectEntries = projectEntries + + def toDict(self): + raise NotImplementedError() + + @staticmethod + def fromDict(d: dict): + return CclsInfo(int(d["pipeline"]["pendingIndexRequests"]), + 0, + int(d["project"]["entries"])) + + class CclsLspServerStub(LspServerStub): def __init__(self, endpoint: Endpoint): super().__init__(endpoint) - async def getJobs(self): + async def cclsInfo(self): """ - Gets the count of jobs to be done before server can provide latest 
call information. + Gets the ccls language server status. """ - result = await self.request("$ccls/getJobs") - return int(result) + result = await self.request("$ccls/info") + return CclsInfo.fromDict(result) class CclsLspClient(LspClient): @@ -87,6 +103,7 @@ def __init__(self, lspClient: CclsLspClient): if not isinstance(lspClient, CclsLspClient): raise TypeError("lspClient should be an instance of CclsLspClient.") super().__init__(lspClient) + self._lspClient:CclsLspClient def createLexer(self, fileStream: FileStream): return CPP14Lexer(fileStream) @@ -103,7 +120,8 @@ def modifyFile(self, fileName: str, newContent: str): async def _waitForJobs(self): lastJobs = None while True: - curJobs = await self._lspClient.server.getJobs() + info:CclsInfo = await self._lspClient.server.cclsInfo() + curJobs = info.pendingIndexRequests + info.postIndexWorkItems if curJobs != lastJobs: _logger.debug("Server jobs: %d.", curJobs) lastJobs = curJobs @@ -121,8 +139,6 @@ async def openDocument(self, textDoc: TextDocument): while True: try: await self._waitForJobs() - # dummy request - await self._lspClient.server.textDocumentCodeLens(textDoc.uri) return True except JsonRpcException as ex: if ex.code == -32002: @@ -133,11 +149,6 @@ async def openDocument(self, textDoc: TextDocument): return False raise - async def modifyFileCore(self, filePath: Path, newContent: str): - os.makedirs(str(filePath.parent), exist_ok=True) - with open(str(filePath), "wt", encoding="utf-8", errors="replace") as f: - f.write(newContent) - class CclsGraphServer(LspClientGraphServer): diff --git a/persper/analytics/lsp_graph_server/languageclient/lspclient.py b/persper/analytics/lsp_graph_server/languageclient/lspclient.py index d9268326c34..c0abd633d75 100644 --- a/persper/analytics/lsp_graph_server/languageclient/lspclient.py +++ b/persper/analytics/lsp_graph_server/languageclient/lspclient.py @@ -8,7 +8,7 @@ from jsonrpc.endpoint import Endpoint from jsonrpc.streams import JsonRpcStreamReader, 
JsonRpcStreamWriter -from persper.analytics.lsp_graph_server.languageclient.lspcontract import MessageType +from persper.analytics.lsp_graph_server.languageclient.lspcontract import MessageType, Registration from persper.analytics.lsp_graph_server.languageclient.lspserver import LspServerStub from persper.analytics.lsp_graph_server.jsonrpcutils import LoggedJsonRpcStreamReader, LoggedJsonRpcStreamWriter, JsonRpcLogger @@ -71,3 +71,6 @@ def m_window__log_message(self, type: MessageType, message: str): def m_text_document__publish_diagnostics(self, uri: str, diagnostics): # ignore all diagnostic information for now. pass + + def m_client__register_capability(self, registrations: list): + regs = [Registration.fromDict(r) for r in registrations] diff --git a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py index 3bd057495a4..930e3741e03 100644 --- a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py +++ b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py @@ -225,7 +225,7 @@ def __init__(self, name: str, detail: str, kind: SymbolKind, deprecated: bool, r The range enclosing this symbol not including leading/trailing whitespace but everything else like comments. This information is typically used to determine if the clients cursor is inside the symbol to reveal in the symbol in the UI. - """ + """ self.selectionRange = selectionRange """ The range that should be selected and revealed when this symbol is being picked, e.g the name of a function. @@ -311,3 +311,46 @@ class TextDocumentSaveReason(Enum): Manual = 1 AfterDelay = 2 FocusOut = 3 + + +class FileChangeType(Enum): + """The file event type.""" + Created = 1 + Changed = 2 + Deleted = 3 + + +class FileEvent(LspContractObject): + """ + An event describing a file change. 
+ """ + + def __init__(self, uri: str, type: FileChangeType): + self.uri = uri + self.type = type + + def toDict(self): + d = {"uri": self.uri, "type": self.type.value} + return d + + +class Registration(LspContractObject): + """ + Represents information about programming constructs like variables, classes, + interfaces etc. + """ + + def __init__(self, id: str, method: str, registerOptions: dict): + self.id = id + self.method = method + self.registerOptions = registerOptions + + def __str__(self): + return self.id + + def toDict(self): + raise NotImplementedError() + + @staticmethod + def fromDict(d: dict): + return Registration(d["id"], d["method"], d.get("registerOptions", None)) diff --git a/persper/analytics/lsp_graph_server/languageclient/lspserver.py b/persper/analytics/lsp_graph_server/languageclient/lspserver.py index e74816f317c..72dac067891 100644 --- a/persper/analytics/lsp_graph_server/languageclient/lspserver.py +++ b/persper/analytics/lsp_graph_server/languageclient/lspserver.py @@ -9,7 +9,7 @@ from jsonrpc.endpoint import Endpoint -from .lspcontract import (DocumentSymbol, Location, Position, +from .lspcontract import (DocumentSymbol, FileEvent, Location, Position, SymbolInformation, TextDocument, TextDocumentContentChangeEvent, TextDocumentSaveReason) @@ -200,3 +200,6 @@ async def textDocumentCodeLens(self, documentUri: str): result = await self.request("textDocument/codeLens", {"textDocument": {"uri": documentUri}}) # We call this method only to synchronize the time sequence return result + + def workspaceDidChangeWatchedFiles(self, changes: Iterable[FileEvent]): + self.notify("workspace/didChangeWatchedFiles", {"changes": [c.toDict() for c in changes]}) From 9d323aeb23e983ec4bf0ec3b23fcadde04f28c85 Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 25 Dec 2018 23:47:24 +0800 Subject: [PATCH 18/84] Make create_repo Windows-compatible. 
--- tools/repo_creater/create_repo.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/tools/repo_creater/create_repo.py b/tools/repo_creater/create_repo.py index 4547a2f60c2..a45e918c99f 100755 --- a/tools/repo_creater/create_repo.py +++ b/tools/repo_creater/create_repo.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- import os +import stat import subprocess import sys import shutil @@ -10,11 +11,29 @@ from git import Repo from persper.util.path import root_path +def rmtree_compat(top): + """ + A Windows-compatible implementation of rmtree. + This implementation guarantees the files have appropriate permissions to delete, + and would avoid Access Denied errors. + """ + if os.name != "nt": + shutil.rmtree(top) + return + + for root, dirs, files in os.walk(top, topdown=False): + for name in files: + filename = os.path.join(root, name) + os.chmod(filename, stat.S_IWUSR) + os.remove(filename) + for name in dirs: + os.rmdir(os.path.join(root, name)) + os.rmdir(top) def make_new_dir(dir_path): """delete old directory first if exists""" if os.path.exists(dir_path): - shutil.rmtree(dir_path) + rmtree_compat(dir_path) os.makedirs(dir_path) From bea18c07edf7179b1d2a9530576074d72d3b972b Mon Sep 17 00:00:00 2001 From: xinyan Date: Wed, 26 Dec 2018 00:37:41 +0800 Subject: [PATCH 19/84] Update notebooks. 
--- notebooks/lsp-ccls-ccls.ipynb | 404 ++++++++++++++++---------------- notebooks/lsp-ccls.ipynb | 429 +++++++++++++++++++++++++++------- 2 files changed, 548 insertions(+), 285 deletions(-) diff --git a/notebooks/lsp-ccls-ccls.ipynb b/notebooks/lsp-ccls-ccls.ipynb index 1e58280f5db..b95fd5adc8f 100644 --- a/notebooks/lsp-ccls-ccls.ipynb +++ b/notebooks/lsp-ccls-ccls.ipynb @@ -17,15 +17,13 @@ { "cell_type": "code", "execution_count": 2, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp5twgn11z\n", + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp5o1wc339\n", "----- Overview ------\n", "# of commits on master: 108\n", "# of commits on branch: 0\n", @@ -36,7 +34,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:41:27,270 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 503 branches (-> 503) from 31 files.\n" + "2018-12-25 00:30:36,811 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 312 branches (-> 312) from 31 files.\n" ] }, { @@ -50,7 +48,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:41:50,247 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 932 branches (-> 992) from 34 files.\n" + "2018-12-25 00:30:59,478 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 971 branches (-> 1018) from 32 files.\n" ] }, { @@ -64,7 +62,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:42:14,616 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 627 branches (-> 782) from 40 files.\n" + "2018-12-25 00:31:22,862 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 678 branches (-> 868) from 40 files.\n" ] }, { @@ -78,7 +76,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:42:34,955 INFO [persper.analytics.lsp_graph_server.callgraph.manager] 
Added 524 branches (-> 806) from 41 files.\n" + "2018-12-25 00:31:41,561 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 570 branches (-> 896) from 42 files.\n" ] }, { @@ -92,7 +90,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:42:57,719 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 679 branches (-> 954) from 39 files.\n" + "2018-12-25 00:32:02,104 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 737 branches (-> 1055) from 40 files.\n" ] }, { @@ -106,7 +104,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:43:17,536 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 639 branches (-> 931) from 27 files.\n" + "2018-12-25 00:32:19,726 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 681 branches (-> 1017) from 28 files.\n" ] }, { @@ -120,7 +118,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:43:37,840 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 698 branches (-> 993) from 32 files.\n" + "2018-12-25 00:32:38,385 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 756 branches (-> 1095) from 33 files.\n" ] }, { @@ -134,7 +132,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:43:55,812 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 783 branches (-> 1099) from 22 files.\n" + "2018-12-25 00:32:54,554 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 806 branches (-> 1190) from 22 files.\n" ] }, { @@ -148,7 +146,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:44:09,742 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 592 branches (-> 1145) from 6 files.\n" + "2018-12-25 00:33:07,855 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 602 branches (-> 1236) from 6 files.\n" ] }, { @@ -162,7 +160,7 @@ "name": "stderr", "output_type": "stream", "text": [ - 
"2018-12-10 21:44:27,726 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 766 branches (-> 1178) from 23 files.\n" + "2018-12-25 00:33:24,684 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 796 branches (-> 1289) from 23 files.\n" ] }, { @@ -176,7 +174,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:44:44,874 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 706 branches (-> 1265) from 5 files.\n" + "2018-12-25 00:33:40,885 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 750 branches (-> 1376) from 6 files.\n" ] }, { @@ -190,7 +188,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:45:04,449 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 716 branches (-> 1260) from 27 files.\n" + "2018-12-25 00:33:58,910 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 738 branches (-> 1354) from 27 files.\n" ] }, { @@ -204,7 +202,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:45:18,873 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 528 branches (-> 1273) from 9 files.\n" + "2018-12-25 00:34:12,482 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 531 branches (-> 1367) from 9 files.\n" ] }, { @@ -218,7 +216,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:45:32,745 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 477 branches (-> 1312) from 2 files.\n" + "2018-12-25 00:34:25,751 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 480 branches (-> 1406) from 2 files.\n" ] }, { @@ -232,7 +230,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:45:49,112 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 799 branches (-> 1374) from 20 files.\n" + "2018-12-25 00:34:40,412 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 815 branches (-> 1475) from 20 files.\n" 
] }, { @@ -246,7 +244,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:46:23,367 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1436 branches (-> 1658) from 50 files.\n" + "2018-12-25 00:35:12,602 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1576 branches (-> 1841) from 51 files.\n" ] }, { @@ -260,7 +258,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:46:51,628 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1181 branches (-> 1709) from 34 files.\n" + "2018-12-25 00:35:38,675 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1224 branches (-> 1867) from 34 files.\n" ] }, { @@ -274,7 +272,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:47:16,061 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 724 branches (-> 1733) from 2 files.\n" + "2018-12-25 00:36:00,971 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 745 branches (-> 1892) from 2 files.\n" ] }, { @@ -288,7 +286,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:47:46,333 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1197 branches (-> 1734) from 34 files.\n" + "2018-12-25 00:36:28,447 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1219 branches (-> 1891) from 34 files.\n" ] }, { @@ -302,7 +300,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:48:12,985 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1153 branches (-> 1726) from 20 files.\n" + "2018-12-25 00:36:52,303 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1199 branches (-> 1907) from 20 files.\n" ] }, { @@ -316,7 +314,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:48:38,870 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1194 branches (-> 1789) from 19 files.\n" + "2018-12-25 00:37:16,023 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1235 branches (-> 1969) from 19 files.\n" ] }, { @@ -330,7 +328,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:49:01,523 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 293 branches (-> 1379) from 1 files.\n" + "2018-12-25 00:37:38,020 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 310 branches (-> 1554) from 1 files.\n" ] }, { @@ -344,7 +342,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:49:03,136 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 549 branches (-> 1437) from 18 files.\n" + "2018-12-25 00:37:39,612 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 549 branches (-> 1593) from 18 files.\n" ] }, { @@ -358,7 +356,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:49:25,825 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 304 branches (-> 1448) from 5 files.\n" + "2018-12-25 00:38:01,528 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 323 branches (-> 1606) from 5 files.\n" ] }, { @@ -372,7 +370,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:50:01,095 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1077 branches (-> 1547) from 58 files.\n" + "2018-12-25 00:38:34,678 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1186 branches (-> 1764) from 59 files.\n" ] }, { @@ -386,7 +384,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:50:25,027 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 914 branches (-> 1609) from 18 files.\n" + "2018-12-25 00:38:57,748 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 931 branches (-> 1764) from 18 files.\n" ] }, { @@ -400,7 +398,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:50:53,501 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 689 branches (-> 1678) from 10 files.\n" + "2018-12-25 00:39:28,120 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 755 branches (-> 1784) from 12 files.\n" ] }, { @@ -414,7 +412,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:51:21,800 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 591 branches (-> 1668) from 11 files.\n" + "2018-12-25 00:39:56,149 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 610 branches (-> 1774) from 11 files.\n" ] }, { @@ -428,7 +426,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:51:41,283 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 654 branches (-> 1730) from 5 files.\n" + "2018-12-25 00:40:14,381 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 708 branches (-> 1837) from 6 files.\n" ] }, { @@ -442,7 +440,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:51:59,023 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 637 branches (-> 1729) from 7 files.\n" + "2018-12-25 00:40:31,524 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 657 branches (-> 1836) from 7 files.\n" ] }, { @@ -456,7 +454,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:52:17,076 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 572 branches (-> 1743) from 7 files.\n" + "2018-12-25 00:40:48,697 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 592 branches (-> 1848) from 7 files.\n" ] }, { @@ -470,7 +468,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:52:55,411 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1517 branches (-> 1835) from 76 files.\n" + "2018-12-25 00:41:24,817 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1602 branches (-> 1983) from 76 files.\n" ] }, { @@ -484,7 +482,7 @@ 
"name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:53:16,946 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1325 branches (-> 1843) from 24 files.\n" + "2018-12-25 00:41:45,286 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1337 branches (-> 1991) from 24 files.\n" ] }, { @@ -498,7 +496,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:53:54,763 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1527 branches (-> 1845) from 75 files.\n" + "2018-12-25 00:42:19,733 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1388 branches (-> 1991) from 74 files.\n" ] }, { @@ -512,7 +510,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:54:20,054 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1416 branches (-> 1847) from 33 files.\n" + "2018-12-25 00:42:43,018 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1455 branches (-> 2007) from 33 files.\n" ] }, { @@ -526,7 +524,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:54:41,048 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 639 branches (-> 1857) from 9 files.\n" + "2018-12-25 00:43:03,368 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 718 branches (-> 2020) from 10 files.\n" ] }, { @@ -540,7 +538,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:55:20,853 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1545 branches (-> 1863) from 81 files.\n" + "2018-12-25 00:43:40,328 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1591 branches (-> 2024) from 81 files.\n" ] }, { @@ -554,7 +552,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:55:54,458 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1556 branches (-> 1890) from 68 files.\n" + "2018-12-25 00:44:11,562 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1602 branches (-> 2051) from 68 files.\n" ] }, { @@ -568,7 +566,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:56:14,963 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 732 branches (-> 1925) from 4 files.\n" + "2018-12-25 00:44:30,835 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 772 branches (-> 2087) from 4 files.\n" ] }, { @@ -582,7 +580,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:56:35,178 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 696 branches (-> 1926) from 4 files.\n" + "2018-12-25 00:44:49,593 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 722 branches (-> 2088) from 4 files.\n" ] }, { @@ -596,7 +594,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:56:55,307 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 698 branches (-> 1928) from 3 files.\n" + "2018-12-25 00:45:08,609 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 724 branches (-> 2090) from 3 files.\n" ] }, { @@ -610,7 +608,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:57:16,635 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 876 branches (-> 1931) from 10 files.\n" + "2018-12-25 00:45:28,386 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 902 branches (-> 2093) from 10 files.\n" ] }, { @@ -624,7 +622,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:57:55,002 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 890 branches (-> 2051) from 6 files.\n" + "2018-12-25 00:46:03,491 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 914 branches (-> 2195) from 6 files.\n" ] }, { @@ -638,7 +636,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:58:39,245 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 855 branches (-> 2016) from 7 files.\n" + "2018-12-25 00:46:45,751 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 877 branches (-> 2158) from 7 files.\n" ] }, { @@ -652,7 +650,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:59:05,386 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 739 branches (-> 1810) from 19 files.\n" + "2018-12-25 00:47:08,789 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 990 branches (-> 2184) from 19 files.\n" ] }, { @@ -666,7 +664,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:59:28,732 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 903 branches (-> 2049) from 10 files.\n" + "2018-12-25 00:47:31,566 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1089 branches (-> 2333) from 13 files.\n" ] }, { @@ -680,7 +678,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 21:59:49,121 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 735 branches (-> 1759) from 11 files.\n" + "2018-12-25 00:47:52,709 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1114 branches (-> 2207) from 13 files.\n" ] }, { @@ -694,7 +692,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:00:05,793 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 502 branches (-> 1702) from 5 files.\n" + "2018-12-25 00:48:09,699 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1048 branches (-> 2381) from 7 files.\n" ] }, { @@ -708,7 +706,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:00:21,326 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 565 branches (-> 1615) from 6 files.\n" + "2018-12-25 00:48:26,615 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1144 branches (-> 2381) from 9 files.\n" ] }, { @@ -722,7 +720,7 
@@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:00:37,524 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 483 branches (-> 1635) from 6 files.\n" + "2018-12-25 00:48:54,066 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1193 branches (-> 2408) from 12 files.\n" ] }, { @@ -736,7 +734,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:00:58,293 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 484 branches (-> 1633) from 7 files.\n" + "2018-12-25 00:49:24,333 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1153 branches (-> 2376) from 12 files.\n" ] }, { @@ -750,7 +748,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:01:26,533 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1397 branches (-> 1754) from 50 files.\n" + "2018-12-25 00:50:01,888 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 00:50:02,262 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2012 branches (-> 2443) from 55 files.\n" ] }, { @@ -764,7 +763,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:01:56,839 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1317 branches (-> 2476) from 13 files.\n" + "2018-12-25 00:50:42,130 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1976 branches (-> 3191) from 18 files.\n" ] }, { @@ -778,7 +777,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:02:24,450 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1277 branches (-> 2480) from 8 files.\n" + "2018-12-25 00:51:21,707 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1971 branches (-> 3217) from 14 files.\n" ] }, { @@ -792,7 +791,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:02:44,312 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1031 branches (-> 2649) from 3 files.\n" + "2018-12-25 00:51:54,448 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1685 branches (-> 3358) from 8 files.\n" ] }, { @@ -806,7 +805,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:03:17,939 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1117 branches (-> 2335) from 8 files.\n" + "2018-12-25 00:52:40,484 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1748 branches (-> 2999) from 14 files.\n" ] }, { @@ -821,7 +820,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:03:49,886 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1239 branches (-> 2457) from 8 files.\n" + "2018-12-25 00:53:26,301 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1694 branches (-> 2945) from 14 files.\n" ] }, { @@ -835,7 +834,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:04:18,490 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1563 branches (-> 2781) from 8 files.\n" + "2018-12-25 00:54:08,039 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1857 branches (-> 3108) from 14 files.\n" ] }, { @@ -849,7 +848,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:04:46,238 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1386 branches (-> 2604) from 8 files.\n" + "2018-12-25 00:54:48,489 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1941 branches (-> 3253) from 11 files.\n" ] }, { @@ -863,7 +862,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:05:13,600 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1454 branches (-> 2587) from 14 files.\n" + "2018-12-25 00:55:27,064 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 00:55:27,387 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 2026 branches (-> 3250) from 18 files.\n" ] }, { @@ -877,7 +877,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:05:25,628 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 647 branches (-> 2547) from 3 files.\n" + "2018-12-25 00:55:47,192 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1023 branches (-> 3210) from 5 files.\n" ] }, { @@ -891,7 +891,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:05:52,501 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1354 branches (-> 2535) from 9 files.\n" + "2018-12-25 00:56:38,645 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1926 branches (-> 3201) from 12 files.\n" ] }, { @@ -905,7 +905,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:06:17,437 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1352 branches (-> 2535) from 8 files.\n" + "2018-12-25 00:57:31,425 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1696 branches (-> 3201) from 12 files.\n" ] }, { @@ -919,7 +919,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:06:46,102 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1339 branches (-> 2520) from 9 files.\n" + "2018-12-25 00:58:23,935 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1950 branches (-> 3191) from 13 files.\n" ] }, { @@ -933,7 +933,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:07:18,293 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1371 branches (-> 2520) from 11 files.\n" + "2018-12-25 00:59:13,408 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 00:59:13,667 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2018 branches (-> 3254) from 15 files.\n" ] }, { @@ -947,7 +948,8 @@ 
"name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:08:00,640 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1374 branches (-> 2552) from 10 files.\n" + "2018-12-25 01:00:03,088 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:00:04,250 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2052 branches (-> 3295) from 15 files.\n" ] }, { @@ -961,7 +963,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:08:40,461 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1436 branches (-> 3000) from 10 files.\n" + "2018-12-25 01:00:44,053 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1794 branches (-> 3727) from 11 files.\n" ] }, { @@ -975,7 +977,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:09:18,285 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1364 branches (-> 2975) from 12 files.\n" + "2018-12-25 01:01:12,743 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1076 branches (-> 3589) from 11 files.\n" ] }, { @@ -989,7 +991,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:09:52,092 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1292 branches (-> 3091) from 9 files.\n" + "2018-12-25 01:01:37,812 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1148 branches (-> 3713) from 8 files.\n" ] }, { @@ -1003,7 +1005,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:10:07,000 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 587 branches (-> 3096) from 3 files.\n" + "2018-12-25 01:01:51,650 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 803 branches (-> 3713) from 3 files.\n" ] }, { @@ -1017,7 +1019,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:10:38,529 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1329 branches (-> 3077) from 7 files.\n" + "2018-12-25 01:02:12,247 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 910 branches (-> 3768) from 5 files.\n" ] }, { @@ -1031,7 +1033,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:10:48,025 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 436 branches (-> 3069) from 4 files.\n" + "2018-12-25 01:02:34,062 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1008 branches (-> 3866) from 5 files.\n" ] }, { @@ -1045,7 +1047,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:10:57,375 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 447 branches (-> 3080) from 4 files.\n" + "2018-12-25 01:02:42,225 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 336 branches (-> 3886) from 3 files.\n" ] }, { @@ -1059,7 +1061,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:11:06,758 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 505 branches (-> 3138) from 4 files.\n" + "2018-12-25 01:02:50,092 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 392 branches (-> 3942) from 3 files.\n" ] }, { @@ -1074,7 +1076,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:11:44,347 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 977 branches (-> 3154) from 9 files.\n" + "2018-12-25 01:03:26,950 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1128 branches (-> 3993) from 10 files.\n" ] }, { @@ -1088,8 +1090,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:12:47,629 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:12:47,631 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2000 branches (-> 3133) from 15 files.\n" + "2018-12-25 01:04:07,508 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:04:27,833 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2804 branches (-> 4521) from 17 files.\n" ] }, { @@ -1103,8 +1105,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:13:43,776 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:14:01,282 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2329 branches (-> 3396) from 21 files.\n" + "2018-12-25 01:05:09,496 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:05:40,036 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3754 branches (-> 4979) from 23 files.\n" ] }, { @@ -1118,7 +1120,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:14:01,793 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 5 branches (-> 3401) from 2 files.\n" + "2018-12-25 01:05:40,504 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 5 branches (-> 4984) from 2 files.\n" ] }, { @@ -1132,8 +1134,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:14:51,701 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:15:00,664 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2521 branches (-> 3589) from 33 files.\n" + "2018-12-25 01:06:18,512 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:06:34,779 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2917 branches (-> 5019) from 35 files.\n" ] }, { @@ -1147,8 +1149,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:15:50,533 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:15:55,018 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 2220 branches (-> 3456) from 16 files.\n" + "2018-12-25 01:07:10,061 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:07:21,423 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2627 branches (-> 5041) from 17 files.\n" ] }, { @@ -1162,8 +1164,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:16:43,888 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:16:45,245 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2114 branches (-> 3463) from 15 files.\n" + "2018-12-25 01:07:59,017 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:08:06,460 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2540 branches (-> 5046) from 16 files.\n" ] }, { @@ -1177,8 +1179,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:17:32,833 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:17:34,234 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2119 branches (-> 3469) from 12 files.\n" + "2018-12-25 01:08:39,989 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:08:50,183 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2549 branches (-> 5056) from 13 files.\n" ] }, { @@ -1192,8 +1194,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:18:22,513 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:18:23,911 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2123 branches (-> 3475) from 13 files.\n" + "2018-12-25 01:09:27,227 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", 
+ "2018-12-25 01:09:35,580 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2554 branches (-> 5063) from 14 files.\n" ] }, { @@ -1207,8 +1209,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:19:12,959 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:19:14,606 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2132 branches (-> 3490) from 11 files.\n" + "2018-12-25 01:10:09,005 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:10:22,770 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2694 branches (-> 5069) from 13 files.\n" ] }, { @@ -1222,7 +1224,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:19:37,302 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1106 branches (-> 3497) from 10 files.\n" + "2018-12-25 01:10:41,091 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1076 branches (-> 5077) from 9 files.\n" ] }, { @@ -1236,8 +1238,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:20:24,758 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:20:26,389 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2132 branches (-> 3497) from 11 files.\n" + "2018-12-25 01:11:14,750 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:11:25,035 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2520 branches (-> 5077) from 11 files.\n" ] }, { @@ -1251,7 +1253,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:21:16,151 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1616 branches (-> 3592) from 13 files.\n" + "2018-12-25 01:11:58,002 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 
branches.\n", + "2018-12-25 01:12:27,114 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3661 branches (-> 5294) from 15 files.\n" ] }, { @@ -1265,7 +1268,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:22:28,280 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1935 branches (-> 3076) from 15 files.\n" + "2018-12-25 01:13:24,944 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:13:32,992 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2457 branches (-> 3854) from 18 files.\n" ] }, { @@ -1279,7 +1283,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:22:34,578 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 13 branches (-> 3081) from 2 files.\n" + "2018-12-25 01:13:39,213 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 14 branches (-> 3861) from 2 files.\n" ] }, { @@ -1293,7 +1297,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:23:06,224 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1306 branches (-> 3124) from 71 files.\n" + "2018-12-25 01:14:09,642 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1481 branches (-> 3904) from 71 files.\n" ] }, { @@ -1307,7 +1311,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:23:22,658 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1009 branches (-> 3127) from 67 files.\n" + "2018-12-25 01:14:30,998 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1359 branches (-> 3907) from 72 files.\n" ] }, { @@ -1321,7 +1325,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:23:36,244 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 895 branches (-> 3131) from 45 files.\n" + "2018-12-25 01:14:44,148 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 876 branches (-> 
3912) from 45 files.\n" ] }, { @@ -1335,7 +1339,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:24:21,461 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1715 branches (-> 3124) from 10 files.\n" + "2018-12-25 01:15:24,172 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:15:30,833 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2325 branches (-> 3909) from 12 files.\n" ] }, { @@ -1349,7 +1354,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:24:31,735 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3124 branches (-> 3124) from 1 files.\n" + "2018-12-25 01:15:42,406 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 8 branches (-> 3917) from 1 files.\n" ] }, { @@ -1363,8 +1368,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:25:31,069 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:25:35,851 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2264 branches (-> 3675) from 13 files.\n" + "2018-12-25 01:16:28,158 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:16:48,675 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3168 branches (-> 4911) from 16 files.\n" ] }, { @@ -1378,7 +1383,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:25:40,217 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 144 branches (-> 3692) from 3 files.\n" + "2018-12-25 01:16:54,919 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 304 branches (-> 4928) from 5 files.\n" ] }, { @@ -1392,7 +1397,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:25:52,830 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 679 branches (-> 3773) from 4 files.\n" + 
"2018-12-25 01:17:05,732 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 762 branches (-> 4940) from 4 files.\n" ] }, { @@ -1400,14 +1405,14 @@ "output_type": "stream", "text": [ "----- No.100 38fb4a4f1da53062cbaa881534f612cd78f8014b on main -----\n", - "------ Used time: 2675.164 -----\n" + "------ Used time: 2798.011 -----\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:26:08,446 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 553 branches (-> 3797) from 5 files.\n" + "2018-12-25 01:17:21,147 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 771 branches (-> 4919) from 6 files.\n" ] }, { @@ -1421,7 +1426,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:26:09,367 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3797 branches (-> 3797) from 2 files.\n" + "2018-12-25 01:17:22,051 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3 branches (-> 4922) from 2 files.\n" ] }, { @@ -1435,8 +1440,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:27:00,632 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:27:20,857 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2494 branches (-> 3723) from 16 files.\n" + "2018-12-25 01:18:01,105 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:18:33,001 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3464 branches (-> 4844) from 20 files.\n" ] }, { @@ -1450,8 +1455,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:28:11,839 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:28:33,146 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2624 branches (-> 3807) from 17 files.\n" + "2018-12-25 01:19:17,270 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:19:38,440 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3175 branches (-> 4848) from 19 files.\n" ] }, { @@ -1465,8 +1470,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:29:19,790 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:29:41,494 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2670 branches (-> 3856) from 15 files.\n" + "2018-12-25 01:20:23,790 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:20:32,894 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2556 branches (-> 4777) from 12 files.\n" ] }, { @@ -1480,7 +1485,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:30:24,167 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1837 branches (-> 3775) from 9 files.\n" + "2018-12-25 01:20:47,888 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 912 branches (-> 4872) from 6 files.\n" ] }, { @@ -1494,8 +1499,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:31:10,697 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:31:31,397 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2519 branches (-> 3810) from 15 files.\n" + "2018-12-25 01:21:25,180 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:21:45,689 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3046 branches (-> 4943) from 16 files.\n" ] }, { @@ -1509,7 +1514,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:31:44,583 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 772 branches (-> 3875) from 3 files.\n" + "2018-12-25 01:21:57,642 
INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 944 branches (-> 5004) from 3 files.\n" ] }, { @@ -1523,8 +1528,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-10 22:32:32,368 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-10 22:32:52,709 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2489 branches (-> 3875) from 13 files.\n" + "2018-12-25 01:22:33,337 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2018-12-25 01:22:54,418 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3121 branches (-> 5026) from 17 files.\n" ] } ], @@ -1533,14 +1538,14 @@ "from persper.analytics.lsp_graph_server.ccls import CclsGraphServer\n", "from tempfile import mkdtemp\n", "\n", - "dumpLogs = False\n", + "dumpLogs = True\n", "workspaceRoot = mkdtemp()\n", "print(\"Workspace root: \", workspaceRoot)\n", "G = None\n", "async with CclsGraphServer(workspaceRoot, cacheRoot=\"./.ccls-cache\",\n", - " languageServerCommand=\"../bin/ccls\" + (\"-log-file=ccls.log\" if dumpLogs else \"\"),\n", + " languageServerCommand=\"../bin/ccls\" + (\" -log-file=ccls.log\" if dumpLogs else \"\"),\n", " dumpLogs=dumpLogs) as graphServer:\n", - " analyzer = Analyzer(\"../../testrepos/ccls\", graphServer)\n", + " analyzer = Analyzer(\"../../../testrepos/ccls\", graphServer)\n", " graphServer.reset_graph()\n", " await analyzer.analyze()\n", " G = analyzer.get_graph()" @@ -1555,8 +1560,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Nodes: 683\n", - "Edges: 3250\n" + "Nodes: 738\n", + "Edges: 3505\n" ] } ], @@ -1576,39 +1581,23 @@ "name": "stdout", "output_type": "stream", "text": [ - "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset) | namespace clang {}\n", - "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string 
&filepath, unsigned int offset)\n", - "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column)\n", - "namespace clang {} | std::string clang::SourceLocation::get_path()\n", - "namespace clang {} | clang::Offset clang::SourceLocation::get_offset()\n", - "namespace clang {} | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", - "namespace clang {} | clang::CompilationDatabase::CompilationDatabase(const int &project_path)\n", "namespace clang {} | namespace clang {}\n", - "namespace clang {} | clang::Tokens::Tokens(int &cx_tu, const clang::SourceRange &range)\n", - "namespace clang {} | int clang::CompileCommand::get_command()\n", - "namespace clang {} | int clang::CompileCommand::get_command_as_args()\n", - "namespace clang {} | std::string clang::to_string(int cx_string)\n", - "namespace clang {} | clang::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", - "namespace clang {} | static unsigned int clang::TranslationUnit::DefaultFlags()\n", - "namespace clang {} | int clang::TranslationUnit::ReparseTranslationUnit(const std::string &buffer, unsigned int flags)\n", - "namespace clang {} | void clang::TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)\n", - "namespace clang {} | int clang::TranslationUnit::get_code_completions(const std::string &buffer, unsigned int line_number, unsigned int column)\n", - "namespace clang {} | int clang::TranslationUnit::get_diagnostics()\n", - "namespace clang {} | int clang::TranslationUnit::get_tokens(unsigned int start_offset, unsigned int end_offset)\n", - "namespace clang {} | int clang::TranslationUnit::get_tokens(unsigned int start_line, unsigned int start_column, unsigned 
int end_line, unsigned int end_column)\n", - "namespace clang {} | int clang::TranslationUnit::get_cursor(std::string path, unsigned int offset)\n", - "namespace clang {} | int clang::TranslationUnit::get_cursor(std::string path, unsigned int line, unsigned int column)\n", - "namespace clang {} | std::string clang::Cursor::Type::get_spelling() const\n", - "namespace clang {} | clang::Cursor::Type clang::Cursor::Type::get_result() const\n", + "namespace clang {} | int clang::CodeCompleteResults::get_usr() const\n", + "namespace clang {} | clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column)\n", + "namespace clang {} | int clang::CodeCompleteResults::get(unsigned int i) const\n", + "namespace clang {} | unsigned int clang::CodeCompleteResults::size() const\n", + "namespace clang {} | clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic)\n", + "namespace clang {} | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity)\n", + "namespace clang {} | int clang::Token::get_spelling() const\n", "namespace clang {} | clang::Cursor::Cursor()\n", "namespace clang {} | clang::Cursor::Cursor::Cursor(const int &other)\n", "namespace clang {} | int Cursor::get_kind() const\n", - "namespace clang {} | clang::Cursor::Type clang::Cursor::get_type() const\n", + "namespace clang {} | int Cursor::get_type() const\n", "namespace clang {} | clang::SourceLocation clang::Cursor::get_source_location() const\n", "namespace clang {} | int Cursor::get_source_range() const\n", - "namespace clang {} | std::string clang::Cursor::get_spelling() const\n", "namespace clang {} | std::string clang::Cursor::get_display_name() const\n", "namespace clang {} | std::string clang::Cursor::get_usr() const\n", + "namespace clang {} | bool Cursor::is_definition() const\n", "namespace clang {} | clang::Cursor clang::Cursor::get_referenced() const\n", "namespace clang {} | clang::Cursor 
clang::Cursor::get_canonical() const\n", "namespace clang {} | clang::Cursor clang::Cursor::get_definition() const\n", @@ -1616,15 +1605,21 @@ "namespace clang {} | int Cursor::get_arguments() const\n", "namespace clang {} | bool clang::Cursor::is_valid_kind() const\n", "namespace clang {} | std::string clang::Cursor::get_type_description() const\n", - "namespace clang {} | std::string clang::Cursor::get_brief_comments() const\n", - "namespace clang {} | clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column)\n", + "namespace clang {} | std::string Cursor::get_comments() const\n", + "namespace clang {} | struct TypeDef\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column)\n", + "namespace clang {} | std::string clang::SourceLocation::get_path()\n", + "namespace clang {} | clang::Offset clang::SourceLocation::get_offset()\n", + "namespace clang {} | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", "namespace clang {} | clang::CompletionString::CompletionString(const int &cx_completion_sting)\n", - "namespace clang {} | int clang::CodeCompleteResults::get(unsigned int i) const\n", - "namespace clang {} | unsigned int clang::CodeCompleteResults::size() const\n", - "namespace clang {} | int clang::CodeCompleteResults::get_usr() const\n", - "namespace clang {} | clang::Index::Index(int excludeDeclarationsFromPCH, int displayDiagnostics)\n", - "namespace clang {} | int clang::SourceRange::get_offsets()\n", - "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, 
unsigned int flags)\n", + "namespace clang {} | int clang::Token::get_source_range() const\n", + "namespace clang {} | clang::Token::Kind clang::Token::get_kind() const\n", + "namespace clang {} | clang::SourceLocation clang::Token::get_source_location() const\n", + "namespace clang {} | bool clang::Token::is_identifier() const\n", + "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, unsigned int flags)\n", + "namespace clang {} | static unsigned int clang::TranslationUnit::DefaultFlags()\n", + "namespace clang {} | int clang::TranslationUnit::ReparseTranslationUnit(const std::string &buffer, unsigned int flags)\n", "namespace clang {} | void TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)\n", "namespace clang {} | int TranslationUnit::get_code_completions(const std::string &buffer, unsigned int line_number, unsigned int column)\n", "namespace clang {} | int TranslationUnit::get_diagnostics()\n", @@ -1633,49 +1628,59 @@ "namespace clang {} | int TranslationUnit::document_cursor() const\n", "namespace clang {} | int TranslationUnit::get_cursor(std::string path, unsigned int offset)\n", "namespace clang {} | int TranslationUnit::get_cursor(std::string path, unsigned int line, unsigned int column)\n", - "namespace clang {} | std::string clang::Type::get_spelling() const\n", - "namespace clang {} | clang::Type clang::Type::get_result() const\n", - "namespace clang {} | clang::Type clang::Cursor::get_type() const\n", - "namespace clang {} | bool clang::Cursor::is_definition() const\n", - "namespace clang {} | std::string clang::Cursor::get_comments() const\n", - "namespace clang {} | clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic)\n", - "namespace clang {} | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity)\n", - "namespace clang {} | 
struct TypeDef\n", "namespace clang {} | std::string clang::ToString(int cx_string)\n", "namespace clang {} | clang::Type::Type()\n", "namespace clang {} | clang::Type::Type::Type(const int &other)\n", "namespace clang {} | std::string clang::Type::get_usr() const\n", + "namespace clang {} | std::string clang::Type::get_spelling() const\n", "namespace clang {} | clang::Type clang::Type::get_return_type() const\n", "namespace clang {} | int Type::get_arguments() const\n", + "namespace clang {} | clang::Type clang::Cursor::get_type() const\n", + "namespace clang {} | bool clang::Cursor::is_definition() const\n", + "namespace clang {} | std::string clang::Cursor::get_comments() const\n", "namespace clang {} | std::string clang::Cursor::ToString() const\n", "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(const int &cx_location)\n", - "namespace clang {} | std::string clang::SourceLocation::ToString() const\n", + "namespace clang {} | std::string SourceLocation::ToString() const\n", "namespace clang {} | std::string clang::Cursor::evaluate() const\n", "namespace clang {} | clang::Type clang::Type::strip_qualifiers() const\n", "namespace clang {} | bool clang::Type::is_fundamental() const\n", "namespace clang {} | int Type::get_template_arguments() const\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation() noexcept\n", + "namespace clang {} | std::string clang::SourceLocation::ToString() const\n", "namespace clang {} | clang::SourceLocation::SourceLocation()\n", "namespace clang {} | class Foo\n", "namespace clang {} | clang::Cursor clang::Cursor::template_specialization_to_template_definition() const\n", - "namespace clang {} | struct Foo {}\n", - "namespace clang {} | class Foo {}\n", "namespace clang {} | clang::CompileCommand::CompileCommand::CompileCommand(const int &command)\n", "namespace clang {} | int CompileCommand::get_command() const\n", "namespace clang {} | int CompileCommand::get_command_as_args() 
const\n", - "namespace clang {} | struct SymbolIdx {}\n", - "namespace clang {} | explicit clang::CompilationDatabase::CompilationDatabase(const std::string &project_path)\n", + "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | struct FindChildOfKindParam {}\n", "namespace clang {} | clang::CompileCommands::CompileCommands(const clang::CompilationDatabase &db)\n", + "namespace clang {} | struct IndexParam {}\n", + "namespace clang {} | clang::CompileCommands::~CompileCommands() noexcept\n", "namespace clang {} | int clang::CompileCommands::get_commands()\n", - "namespace clang {} | struct QueryableDatabase {}\n", - "namespace clang {} | struct Task {}\n", - "namespace clang {} | int Type::get_declaration() const\n", - "namespace clang {} | struct IpcRegistry {}\n", + "namespace clang {} | struct CachedIndexedFile {}\n", + "namespace clang {} | struct Location {}\n", + "namespace clang {} | struct SymbolIdx {}\n", "namespace clang {} | struct QueryableLocation {}\n", - "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column) | namespace clang {}\n", - "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column) | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", - "std::string clang::SourceLocation::get_path() | namespace clang {}\n", - "std::string clang::SourceLocation::get_path() | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", - "void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset) | namespace clang {}\n", + "namespace clang {} | int 
Type::get_declaration() const\n", + "int clang::CodeCompleteResults::get_usr() const | namespace clang {}\n", + "int clang::CodeCompleteResults::get_usr() const | std::string clang::ToString(int cx_string)\n", + "int clang::CodeCompleteResults::get_usr() const | std::string SourceLocation::ToString() const\n", + "int clang::CodeCompleteResults::get_usr() const | std::string clang::SourceLocation::ToString() const\n", + "int clang::CodeCompleteResults::get_usr() const | std::string ToString()\n", + "int clang::CodeCompleteResults::get_usr() const | std::string IndexedFile::ToString()\n", + "int clang::CodeCompleteResults::get_usr() const | std::string ToString(const int &document)\n", + "int clang::CodeCompleteResults::get_usr() const | int ToString(const int &document)\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | namespace clang {}\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | unsigned int clang::CodeCompleteResults::size() const\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | std::string clang::ToString(int cx_string)\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct Database {}\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct QueryDatabase {}\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct QueryableDatabase {}\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct Location {}\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, 
unsigned int line_num, unsigned int column) | struct QueryableLocation {}\n", + "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | std::string ToString(const int &document)\n", "...\n" ] } @@ -1690,34 +1695,35 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "# un-pickle test\n", "import pickle\n", - "loaded = pickle.load(open(\"repos-finished-0.pickle\", \"rb\"))" + "loaded = pickle.load(open(\"ccls-finished-0.pickle\", \"rb\"))" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n" + "namespace clang {} | namespace clang {}\n", + 
"namespace clang {} | int clang::CodeCompleteResults::get_usr() const\n", + "namespace clang {} | clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column)\n", + "namespace clang {} | int clang::CodeCompleteResults::get(unsigned int i) const\n", + "namespace clang {} | unsigned int clang::CodeCompleteResults::size() const\n", + "namespace clang {} | clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic)\n", + "namespace clang {} | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity)\n", + "namespace clang {} | int clang::Token::get_spelling() const\n", + "namespace clang {} | clang::Cursor::Cursor()\n", + "namespace clang {} | clang::Cursor::Cursor::Cursor(const int &other)\n", + "...\n" ] } ], diff --git a/notebooks/lsp-ccls.ipynb b/notebooks/lsp-ccls.ipynb index bc58c890a46..2b7c9ab4494 100644 --- a/notebooks/lsp-ccls.ipynb +++ b/notebooks/lsp-ccls.ipynb @@ -8,9 +8,10 @@ "source": [ "import logging\n", "\n", - "#logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s',\n", - "# level=logging.INFO)\n", - "#logging.getLogger('asyncio').setLevel(logging.CRITICAL)" + "logging.basicConfig(format='%(asctime)s %(levelname)-8s [%(name)s] %(message)s',\n", + " level=logging.INFO)\n", + "logging.getLogger(\"persper.analytics.lsp_graph_server.callgraph.manager\").setLevel(logging.INFO)\n", + "# logging.getLogger('asyncio').setLevel(logging.CRITICAL)" ] }, { @@ -24,14 +25,149 @@ "name": "stdout", "output_type": "stream", "text": [ - "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpp_gx74i7\n", + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-26 00:34:17,722 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Exceptions.h.\n" + ] + }, + { + "name": "stdout", + 
"output_type": "stream", + "text": [ "----- Overview ------\n", "# of commits on master: 4\n", "# of commits on branch: 0\n", - "----- No.1 ad1187ea6cfc9dc5f2ea869db16895b3e7ae10f0 on main -----\n", - "----- No.2 f3cce3b9fb9cffe48343abad7e9617dd724835d9 on main -----\n", - "----- No.3 aca68e9d114665fb327b90234129cf4478800e10 on main -----\n", - "----- No.4 f482b140071867d4b2cd30f045725775e5394ccd on main -----\n" + "----- No.1 37d91e19997dcfac11c7213126761920ad57f0d5 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-26 00:34:17,790 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp.\n", + "2018-12-26 00:34:17,859 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.h.\n", + "2018-12-26 00:34:17,934 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TypeTraits.h.\n", + "2018-12-26 00:34:18,012 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp.\n", + "2018-12-26 00:34:18,093 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h.\n", + "2018-12-26 00:34:18,190 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", + "2018-12-26 00:34:18,284 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.cpp.\n", + "2018-12-26 00:34:18,370 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.h.\n", + "2018-12-26 00:34:18,376 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 0 branches by 9 files.\n", + 
"2018-12-26 00:34:18,378 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", + "2018-12-26 00:34:18,380 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", + "2018-12-26 00:34:21,427 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", + "2018-12-26 00:34:21,429 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.h\n", + "2018-12-26 00:34:21,577 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2018-12-26 00:34:21,579 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", + "2018-12-26 00:34:22,468 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 80 branches.\n", + "2018-12-26 00:34:22,470 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", + "2018-12-26 00:34:22,809 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 20 branches.\n", + "2018-12-26 00:34:22,810 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TypeTraits.h\n", + "2018-12-26 00:34:22,896 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 1 branches.\n", + "2018-12-26 00:34:22,898 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.h\n", + "2018-12-26 00:34:23,515 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 39 branches.\n", + "2018-12-26 00:34:23,516 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: 
C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", + "2018-12-26 00:34:23,667 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 2 branches.\n", + "2018-12-26 00:34:23,668 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.cpp\n", + "2018-12-26 00:34:23,775 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2018-12-26 00:34:23,777 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Exceptions.h\n", + "2018-12-26 00:34:24,150 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", + "2018-12-26 00:34:24,151 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 130 branches (-> 130) from 9 files.\n", + "2018-12-26 00:34:24,151 INFO [persper.analytics.lsp_graph_server] End commit: 37d91e19997dcfac11c7213126761920ad57f0d5\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.2 1745919965e74be2ae6549d0db90eaa75af1b21c on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-26 00:34:24,382 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility-1.cpp.\n", + "2018-12-26 00:34:24,513 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", + "2018-12-26 00:34:24,515 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 77 branches by 5 files.\n", + "2018-12-26 00:34:24,516 INFO [persper.analytics.lsp_graph_server] Invalidated 3 files, affected 5 files.\n", + "2018-12-26 00:34:24,517 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", + "2018-12-26 00:34:24,519 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", + "2018-12-26 00:34:26,972 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", + "2018-12-26 00:34:26,974 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", + "2018-12-26 00:34:27,168 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 12 branches.\n", + "2018-12-26 00:34:27,171 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", + "2018-12-26 00:34:28,186 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 80 branches.\n", + "2018-12-26 00:34:28,189 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility-1.cpp\n", + "2018-12-26 00:34:28,514 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2018-12-26 00:34:28,515 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 144 branches (-> 197) from 4 files.\n", + "2018-12-26 00:34:28,516 INFO [persper.analytics.lsp_graph_server] End commit: 1745919965e74be2ae6549d0db90eaa75af1b21c\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.3 1cb351c5c12916338c73aba2a2e4f0384ff06b07 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-26 00:34:28,854 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp.\n", + "2018-12-26 00:34:29,091 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp.\n", + "2018-12-26 00:34:29,287 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h.\n", + "2018-12-26 00:34:29,476 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", + "2018-12-26 00:34:29,481 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 144 branches by 5 files.\n", + "2018-12-26 00:34:29,483 INFO [persper.analytics.lsp_graph_server] Invalidated 5 files, affected 5 files.\n", + "2018-12-26 00:34:29,485 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility-1.cpp\n", + "2018-12-26 00:34:29,487 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", + "2018-12-26 00:34:32,527 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", + "2018-12-26 00:34:32,529 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", + "2018-12-26 00:34:33,509 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 162 branches.\n", + "2018-12-26 00:34:33,511 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", + "2018-12-26 00:34:33,844 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2018-12-26 00:34:33,845 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", + "2018-12-26 00:34:33,996 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", + "2018-12-26 00:34:33,997 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 273 branches (-> 326) from 4 files.\n", + "2018-12-26 
00:34:33,998 INFO [persper.analytics.lsp_graph_server] End commit: 1cb351c5c12916338c73aba2a2e4f0384ff06b07\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----- No.4 dc00db06e9013dc4b92c199887f081049e3a4276 on main -----\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2018-12-26 00:34:34,232 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp.\n", + "2018-12-26 00:34:34,397 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp.\n", + "2018-12-26 00:34:34,562 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h.\n", + "2018-12-26 00:34:34,728 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", + "2018-12-26 00:34:34,733 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 273 branches by 4 files.\n", + "2018-12-26 00:34:34,735 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 4 files.\n", + "2018-12-26 00:34:34,737 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", + "2018-12-26 00:34:37,262 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", + "2018-12-26 00:34:37,263 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", + "2018-12-26 00:34:37,971 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 162 branches.\n", + "2018-12-26 00:34:37,972 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", + "2018-12-26 00:34:38,250 
INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2018-12-26 00:34:38,251 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", + "2018-12-26 00:34:38,399 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 3 branches.\n", + "2018-12-26 00:34:38,400 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 270 branches (-> 323) from 4 files.\n", + "2018-12-26 00:34:38,401 INFO [persper.analytics.lsp_graph_server] End commit: dc00db06e9013dc4b92c199887f081049e3a4276\n", + "2018-12-26 00:34:38,408 INFO [persper.analytics.lsp_graph_server] Shutting down language server...\n", + "2018-12-26 00:35:41,011 INFO [persper.analytics.lsp_graph_server] Language server 24988 exited with code: 3221225786.\n" ] } ], @@ -40,14 +176,17 @@ "from persper.analytics.lsp_graph_server.ccls import CclsGraphServer\n", "from tempfile import mkdtemp\n", "\n", + "dumpLogs = True\n", "workspaceRoot = mkdtemp()\n", "print(\"Workspace root: \", workspaceRoot)\n", "G = None\n", - "async with CclsGraphServer(workspaceRoot, cacheRoot=\"./.ccls-cache\", languageServerCommand=\"../bin/ccls -log-file=ccls.log\") as graphServer:\n", - " analyzer = Analyzer(\"../repos\", graphServer)\n", + "async with CclsGraphServer(workspaceRoot, cacheRoot=\"./.ccls-cache\",\n", + " languageServerCommand=\"../bin/ccls\" + (\" -log-file=ccls.log\" if dumpLogs else \"\"),\n", + " dumpLogs=dumpLogs) as graphServer:\n", + " analyzer = Analyzer(\"../repos/cpp_test_repo\", graphServer)\n", " graphServer.reset_graph()\n", " await analyzer.analyze()\n", - " G = await analyzer.get_graph()" + " G = analyzer.get_graph()" ] }, { @@ -59,12 +198,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "['Equal', 'StringComparison', 'Confirm', 'c', 'ReportException', 'subEx', 'main', 'OpenAndValidate', 'ConfigurationParser', 'dynamic_kind_of', 'TSrc', 'pointer_kind_of', 
'safe_cast', 'safe_pointer_cast', 'StreamStatusToString', 'TStream', 'TPath', 'ValidateStream', '_RangeToEnumerable', 'RangeToEnumerable', 'to_string', 'is_flags', 'operator&', 'TEnum', 'lhs', 'rhs', 'operator|', 'FriendlyNameOf', 'BlockExitHandler', 'e', 'operator>>', 'ConfigurationParser::Load', 'ConfigurationParser::ConfigurationParser', 'Exception', 'ConfigurationParser::GetString', 'ConfigurationParser::GetInt', 'ConfigurationParser::GetDouble', 'ConfigurationParser::GetBool', 'InvalidCastException', 'ArgumentException', 'OperationFailureException', 'is_flags_v']\n" + "Nodes: 41\n", + "Edges: 71\n" ] } ], "source": [ - "print(G.nodes())" + "print(\"Nodes:\", len(G.nodes()))\n", + "print(\"Edges:\", len(G.edges()))" ] }, { @@ -78,89 +219,205 @@ "name": "stdout", "output_type": "stream", "text": [ - "Edges: 67\n", - "Equal | Equal\n", - "Equal | StringComparison\n", - "StringComparison | StringComparison\n", - "Confirm | Confirm\n", - "Confirm | c\n", - "ReportException | ReportException\n", - "ReportException | subEx\n", - "main | OpenAndValidate\n", - "main | ConfigurationParser\n", - "main | main\n", - "main | ConfigurationParser::GetBool\n", - "main | ConfigurationParser::GetDouble\n", - "main | ConfigurationParser::GetString\n", - "OpenAndValidate | TStream\n", - "OpenAndValidate | TPath\n", - "dynamic_kind_of | TSrc\n", - "pointer_kind_of | TSrc\n", - "safe_cast | TSrc\n", - "safe_pointer_cast | TSrc\n", - "StreamStatusToString | TStream\n", - "ValidateStream | TStream\n", - "_RangeToEnumerable | _RangeToEnumerable\n", - "RangeToEnumerable | _RangeToEnumerable\n", - "to_string | to_string\n", - "is_flags | StringComparison\n", - "operator& | TEnum\n", - "lhs | TEnum\n", - "rhs | TEnum\n", - "operator| | TEnum\n", - "FriendlyNameOf | FriendlyNameOf\n", - "BlockExitHandler | BlockExitHandler\n", - "BlockExitHandler | e\n", - "operator>> | operator>>\n", - "operator>> | c\n", - "operator>> | Equal\n", - "operator>> | StringComparison\n", - 
"ConfigurationParser::Load | ConfigurationParser::ConfigurationParser\n", - "ConfigurationParser::Load | ConfigurationParser::Load\n", - "ConfigurationParser::Load | operator>>\n", - "ConfigurationParser::Load | Exception\n", - "ConfigurationParser::ConfigurationParser | ConfigurationParser::ConfigurationParser\n", - "ConfigurationParser::ConfigurationParser | ConfigurationParser::Load\n", - "ConfigurationParser::ConfigurationParser | OpenAndValidate\n", - "Exception | Exception\n", - "ConfigurationParser::GetString | ConfigurationParser::ConfigurationParser\n", - "ConfigurationParser::GetString | ConfigurationParser::GetString\n", - "ConfigurationParser::GetInt | ConfigurationParser::ConfigurationParser\n", - "ConfigurationParser::GetInt | ConfigurationParser::GetString\n", - "ConfigurationParser::GetInt | Exception\n", - "ConfigurationParser::GetInt | ConfigurationParser::GetInt\n", - "ConfigurationParser::GetDouble | ConfigurationParser::ConfigurationParser\n", - "ConfigurationParser::GetDouble | ConfigurationParser::GetString\n", - "ConfigurationParser::GetDouble | Exception\n", - "ConfigurationParser::GetDouble | ConfigurationParser::GetDouble\n", - "ConfigurationParser::GetBool | ConfigurationParser::ConfigurationParser\n", - "ConfigurationParser::GetBool | ConfigurationParser::GetString\n", - "ConfigurationParser::GetBool | Equal\n", - "ConfigurationParser::GetBool | StringComparison\n", - "ConfigurationParser::GetBool | Exception\n", - "ConfigurationParser::GetBool | ConfigurationParser::GetBool\n", - "InvalidCastException | Exception\n", - "InvalidCastException | InvalidCastException\n", - "ArgumentException | Exception\n", - "ArgumentException | ArgumentException\n", - "OperationFailureException | Exception\n", - "OperationFailureException | OperationFailureException\n", - "is_flags_v | is_flags\n" + "_RangeToEnumerable RangeToEnumerable(const std::pair range) | class _RangeToEnumerable {}\n", + "template<> struct is_flags : std::true_type {} | enum class 
StringComparison : int {}\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "bool Equal(const int &lhs, const int &rhs, int comparision) | enum class StringComparison : int {}\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const int &prompt)\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", + "bool Confirm(const int &prompt) | class RowReader {}\n", + "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const int &ex, int level)\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", + "int &operator>>(int &reader, int &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, long &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, long &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, float &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, float &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, double &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, double &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, bool &rhs) | class RowReader {}\n", + "int 
&operator>>(int &reader, bool &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, bool &rhs) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", + "int &operator>>(int &reader, bool &rhs) | enum class StringComparison : int {}\n", + "int &operator>>(int &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, std::string &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, int &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const 
std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | enum class StringComparison : int {}\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", + "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", + "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", + "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", + "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", + "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", + "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", + "void ConfigurationParser::Load(std::istream &inputStream) | class ConfigurationParser {}\n", + "void ConfigurationParser::Load(std::istream &inputStream) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "void ConfigurationParser::Load(std::istream &inputStream) | class Exception : public std::exception {}\n", + "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const | class ConfigurationParser {}\n", + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class ConfigurationParser {}\n", + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class Exception : public std::exception {}\n", + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class 
ConfigurationParser {}\n", + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class Exception : public std::exception {}\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class ConfigurationParser {}\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | enum class StringComparison : int {}\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class Exception : public std::exception {}\n", + "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | class ConfigurationParser {}\n", + "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | void ConfigurationParser::Load(std::istream &inputStream)\n", + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | class ConfigurationParser {}\n", + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | TStream OpenAndValidate(const TPath arg1)\n", + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | void ConfigurationParser::Load(std::istream &inputStream)\n" ] } ], "source": [ - "print(\"Edges:\", len(G.edges()))\n", - "for e in G.edges():\n", - " print(e[0], \" | \", e[1])" + "import itertools\n", + "for e in itertools.islice(G.edges(), 100):\n", + " print(e[0], \" | \", e[1])\n", + "if len(G.edges()) 
> 100:\n", + " print(\"...\")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# un-pickle test\n", + "import pickle\n", + "loaded = pickle.load(open(\"cpp_test_repo-finished-0.pickle\", \"rb\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "_RangeToEnumerable RangeToEnumerable(const std::pair range) | class _RangeToEnumerable {}\n", + "template<> struct is_flags : std::true_type {} | enum class StringComparison : int {}\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "bool Equal(const int &lhs, const int &rhs, int comparision) | enum class StringComparison : int {}\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const int &prompt)\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", + "bool Confirm(const int &prompt) | class RowReader {}\n", + "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const int &ex, int level)\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", + "int 
&operator>>(int &reader, int &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, long &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, long &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, float &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, float &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, double &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, double &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, bool &rhs) | class RowReader {}\n", + "int &operator>>(int &reader, bool &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, bool &rhs) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", + "int &operator>>(int &reader, bool &rhs) | enum class StringComparison : int {}\n", + "int &operator>>(int &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, std::string &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, int &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, 
std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | class RowReader {}\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | enum class StringComparison : int {}\n", + "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", + "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", + "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", + "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", + "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", + "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", + "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", + "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", + "void ConfigurationParser::Load(std::istream &inputStream) | class ConfigurationParser {}\n", + "void ConfigurationParser::Load(std::istream &inputStream) | RowReader &operator>>(RowReader &reader, int &rhs)\n", + "void ConfigurationParser::Load(std::istream &inputStream) | class Exception : public std::exception {}\n", + "std::string ConfigurationParser::GetString(const std::string &key, const std::string 
&defaultValue) const | class ConfigurationParser {}\n", + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class ConfigurationParser {}\n", + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class Exception : public std::exception {}\n", + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class ConfigurationParser {}\n", + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class Exception : public std::exception {}\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class ConfigurationParser {}\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | enum class StringComparison : int {}\n", + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class Exception : public std::exception {}\n", + "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | class ConfigurationParser {}\n", + "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | void ConfigurationParser::Load(std::istream &inputStream)\n", + 
"ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | class ConfigurationParser {}\n", + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | TStream OpenAndValidate(const TPath arg1)\n", + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | void ConfigurationParser::Load(std::istream &inputStream)\n" + ] + } + ], + "source": [ + "for e in itertools.islice(loaded.get_graph().edges(), 100):\n", + " print(e[0], \" | \", e[1])\n", + "if len(loaded.get_graph().edges()) > 100:\n", + " print(\"...\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "No traceback available to show.\n" + ] + } + ], + "source": [ + "%tb" + ] } ], "metadata": { From 304bbc0c834eef653deb312b9ce7532d42787672 Mon Sep 17 00:00:00 2001 From: xinyan Date: Wed, 26 Dec 2018 00:38:13 +0800 Subject: [PATCH 20/84] Remove pip dependency: jsonpickle. 
--- Pipfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Pipfile b/Pipfile index 5fda71a5d88..a89dc5c62fe 100644 --- a/Pipfile +++ b/Pipfile @@ -19,7 +19,6 @@ ipykernel = "*" matplotlib = "*" antlr4-python3-runtime = "*" python-jsonrpc-server = "*" -jsonpickle = "*" [dev-packages] From cf4fa8447e9dd9d7bf5054699e08b7f2b4c08740 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sat, 5 Jan 2019 13:50:43 -0800 Subject: [PATCH 21/84] Replace subprocess.CREATE_NEW_CONSOLE (windows only) with shell=True --- persper/analytics/lsp_graph_server/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 2fd8b663e7b..47ebb4bc2f1 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -133,8 +133,9 @@ async def startLspClient(self): """ self._lspServerProc = subprocess.Popen( self._languageServerCommand, - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - creationflags=subprocess.CREATE_NEW_CONSOLE) + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + shell=True) async def stopLspClient(self): """ From bc710f11ef257fea15538c6fbe96373bbf5e6c24 Mon Sep 17 00:00:00 2001 From: xinyan Date: Wed, 9 Jan 2019 22:41:01 +0800 Subject: [PATCH 22/84] Make pathFromUri Linux-compatible. 
--- .../analytics/lsp_graph_server/callgraph/builder.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 9abfa2b8a3a..012c268a781 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -261,8 +261,16 @@ def tokenGenerator(): def pathFromUri(self, expr: str) -> Path: expr: str = urllib.parse.unquote(expr).strip() - if expr.lower().startswith("file:///"): - expr = expr[8:] + if expr[:7].lower() == "file://": + if expr[7:8] == "/": # Local file + if expr[9:10] == ":": # Windows drive e.g. C: + expr = expr[8:] # Remove all the leading slashes + else: # Linux path + expr = expr[7:] + else: # UNC address + expr = expr[5:] + else: + expr = urllib.parse.unquote(expr) return Path(expr).resolve() @abstractclassmethod From 407e312f113851338605258b2eb3d66dc2542dce Mon Sep 17 00:00:00 2001 From: xinyan Date: Thu, 17 Jan 2019 23:51:06 +0800 Subject: [PATCH 23/84] Add node history when building commit graph. Do not track the graph nodes while in CallCommitGraphSynchronizer. We keep track in underlying CallCommitGraph. 
--- notebooks/lsp-ccls-ccls.ipynb | 526 +++++++++++------- notebooks/lsp-ccls.ipynb | 330 ++++++----- .../analytics/lsp_graph_server/__init__.py | 109 +++- .../lsp_graph_server/callgraph/adapters.py | 19 +- .../lsp_graph_server/callgraph/builder.py | 9 +- .../lsp_graph_server/callgraph/manager.py | 9 +- .../lsp_graph_server/jsonrpcutils.py | 2 + 7 files changed, 641 insertions(+), 363 deletions(-) diff --git a/notebooks/lsp-ccls-ccls.ipynb b/notebooks/lsp-ccls-ccls.ipynb index b95fd5adc8f..28eeeae008b 100644 --- a/notebooks/lsp-ccls-ccls.ipynb +++ b/notebooks/lsp-ccls-ccls.ipynb @@ -23,7 +23,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp5o1wc339\n", + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpd69om9e8\n", "----- Overview ------\n", "# of commits on master: 108\n", "# of commits on branch: 0\n", @@ -34,7 +34,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:30:36,811 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 312 branches (-> 312) from 31 files.\n" + "2019-01-17 22:49:38,759 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 687 branches from 31 files.\n" ] }, { @@ -48,7 +48,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:30:59,478 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 971 branches (-> 1018) from 32 files.\n" + "2019-01-17 22:49:59,630 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1516 branches from 28 files.\n" ] }, { @@ -62,7 +62,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:31:22,862 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 678 branches (-> 868) from 40 files.\n" + "2019-01-17 22:50:21,756 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1139 branches from 38 files.\n" ] }, { @@ -76,7 +76,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:31:41,561 
INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 570 branches (-> 896) from 42 files.\n" + "2019-01-17 22:50:39,501 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 886 branches from 39 files.\n" ] }, { @@ -90,7 +90,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:32:02,104 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 737 branches (-> 1055) from 40 files.\n" + "2019-01-17 22:50:59,757 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1086 branches from 38 files.\n" ] }, { @@ -104,7 +104,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:32:19,726 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 681 branches (-> 1017) from 28 files.\n" + "2019-01-17 22:51:17,096 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1051 branches from 22 files.\n" ] }, { @@ -118,7 +118,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:32:38,385 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 756 branches (-> 1095) from 33 files.\n" + "2019-01-17 22:51:35,236 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1128 branches from 25 files.\n" ] }, { @@ -132,7 +132,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:32:54,554 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 806 branches (-> 1190) from 22 files.\n" + "2019-01-17 22:51:50,596 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1102 branches from 13 files.\n" ] }, { @@ -146,7 +146,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:33:07,855 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 602 branches (-> 1236) from 6 files.\n" + "2019-01-17 22:52:02,589 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 798 branches from 4 files.\n" ] }, { @@ -160,7 +160,7 @@ "name": "stderr", "output_type": "stream", "text": [ 
- "2018-12-25 00:33:24,684 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 796 branches (-> 1289) from 23 files.\n" + "2019-01-17 22:52:17,925 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1091 branches from 15 files.\n" ] }, { @@ -174,7 +174,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:33:40,885 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 750 branches (-> 1376) from 6 files.\n" + "2019-01-17 22:52:33,381 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1150 branches from 4 files.\n" ] }, { @@ -188,7 +188,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:33:58,910 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 738 branches (-> 1354) from 27 files.\n" + "2019-01-17 22:52:50,679 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1322 branches from 20 files.\n" ] }, { @@ -202,7 +202,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:34:12,482 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 531 branches (-> 1367) from 9 files.\n" + "2019-01-17 22:53:03,821 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 957 branches from 2 files.\n" ] }, { @@ -216,7 +216,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:34:25,751 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 480 branches (-> 1406) from 2 files.\n" + "2019-01-17 22:53:17,070 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 935 branches from 2 files.\n" ] }, { @@ -230,7 +230,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:34:40,412 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 815 branches (-> 1475) from 20 files.\n" + "2019-01-17 22:53:31,372 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1041 branches from 6 files.\n" ] }, { @@ -244,7 +244,7 @@ "name": "stderr", 
"output_type": "stream", "text": [ - "2018-12-25 00:35:12,602 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1576 branches (-> 1841) from 51 files.\n" + "2019-01-17 22:54:01,717 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1926 branches from 32 files.\n" ] }, { @@ -258,7 +258,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:35:38,675 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1224 branches (-> 1867) from 34 files.\n" + "2019-01-17 22:54:28,115 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1609 branches from 13 files.\n" ] }, { @@ -272,7 +272,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:36:00,971 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 745 branches (-> 1892) from 2 files.\n" + "2019-01-17 22:54:51,311 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1355 branches from 2 files.\n" ] }, { @@ -286,7 +286,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:36:28,447 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1219 branches (-> 1891) from 34 files.\n" + "2019-01-17 22:55:19,094 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1744 branches from 22 files.\n" ] }, { @@ -300,7 +300,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:36:52,303 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1199 branches (-> 1907) from 20 files.\n" + "2019-01-17 22:55:43,285 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1414 branches from 5 files.\n" ] }, { @@ -314,7 +314,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:37:16,023 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1235 branches (-> 1969) from 19 files.\n" + "2019-01-17 22:56:07,344 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1358 branches from 3 files.\n" ] }, { @@ 
-328,7 +328,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:37:38,020 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 310 branches (-> 1554) from 1 files.\n" + "2019-01-17 22:56:29,110 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1157 branches from 1 files.\n" ] }, { @@ -342,7 +342,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:37:39,612 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 549 branches (-> 1593) from 18 files.\n" + "2019-01-17 22:56:29,402 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 70 branches from 1 files.\n" ] }, { @@ -356,7 +356,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:38:01,528 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 323 branches (-> 1606) from 5 files.\n" + "2019-01-17 22:56:52,954 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1177 branches from 5 files.\n" ] }, { @@ -370,7 +370,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:38:34,678 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1186 branches (-> 1764) from 59 files.\n" + "2019-01-17 22:57:24,481 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 22:57:27,024 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2654 branches from 49 files.\n" ] }, { @@ -384,7 +385,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:38:57,748 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 931 branches (-> 1764) from 18 files.\n" + "2019-01-17 22:57:49,788 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1274 branches from 3 files.\n" ] }, { @@ -398,7 +399,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:39:28,120 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 755 branches (-> 1784) from 
12 files.\n" + "2019-01-17 22:58:14,737 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1314 branches from 6 files.\n" ] }, { @@ -412,7 +413,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:39:56,149 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 610 branches (-> 1774) from 11 files.\n" + "2019-01-17 22:58:40,763 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1410 branches from 5 files.\n" ] }, { @@ -426,7 +427,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:40:14,381 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 708 branches (-> 1837) from 6 files.\n" + "2019-01-17 22:58:58,677 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1081 branches from 3 files.\n" ] }, { @@ -440,7 +441,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:40:31,524 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 657 branches (-> 1836) from 7 files.\n" + "2019-01-17 22:59:13,770 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 945 branches from 3 files.\n" ] }, { @@ -454,7 +455,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:40:48,697 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 592 branches (-> 1848) from 7 files.\n" + "2019-01-17 22:59:29,088 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 862 branches from 5 files.\n" ] }, { @@ -468,7 +469,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:41:24,817 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1602 branches (-> 1983) from 76 files.\n" + "2019-01-17 22:59:59,882 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:00:03,505 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3015 branches from 72 files.\n" ] }, { @@ -482,7 +484,7 @@ "name": "stderr", 
"output_type": "stream", "text": [ - "2018-12-25 00:41:45,286 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1337 branches (-> 1991) from 24 files.\n" + "2019-01-17 23:00:22,231 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1133 branches from 4 files.\n" ] }, { @@ -496,7 +498,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:42:19,733 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1388 branches (-> 1991) from 74 files.\n" + "2019-01-17 23:00:52,943 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:00:55,921 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2950 branches from 71 files.\n" ] }, { @@ -510,7 +513,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:42:43,018 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1455 branches (-> 2007) from 33 files.\n" + "2019-01-17 23:01:17,915 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1320 branches from 11 files.\n" ] }, { @@ -524,7 +527,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:43:03,368 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 718 branches (-> 2020) from 10 files.\n" + "2019-01-17 23:01:37,582 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 991 branches from 7 files.\n" ] }, { @@ -538,7 +541,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:43:40,328 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1591 branches (-> 2024) from 81 files.\n" + "2019-01-17 23:02:10,127 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:02:13,838 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3038 branches from 77 files.\n" ] }, { @@ -552,7 +556,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:44:11,562 
INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1602 branches (-> 2051) from 68 files.\n" + "2019-01-17 23:02:41,859 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:02:43,349 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2530 branches from 47 files.\n" ] }, { @@ -566,7 +571,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:44:30,835 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 772 branches (-> 2087) from 4 files.\n" + "2019-01-17 23:03:02,148 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 976 branches from 1 files.\n" ] }, { @@ -580,7 +585,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:44:49,593 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 722 branches (-> 2088) from 4 files.\n" + "2019-01-17 23:03:21,097 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 979 branches from 2 files.\n" ] }, { @@ -594,7 +599,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:45:08,609 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 724 branches (-> 2090) from 3 files.\n" + "2019-01-17 23:03:39,146 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 981 branches from 1 files.\n" ] }, { @@ -608,7 +613,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:45:28,386 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 902 branches (-> 2093) from 10 files.\n" + "2019-01-17 23:03:57,577 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1014 branches from 2 files.\n" ] }, { @@ -622,7 +627,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:46:03,491 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 914 branches (-> 2195) from 6 files.\n" + "2019-01-17 23:04:31,849 INFO [persper.analytics.lsp_graph_server.callgraph.manager] 
Added 1961 branches from 3 files.\n" ] }, { @@ -636,7 +641,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:46:45,751 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 877 branches (-> 2158) from 7 files.\n" + "2019-01-17 23:05:02,159 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:05:03,906 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2249 branches from 3 files.\n" ] }, { @@ -650,7 +656,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:47:08,789 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 990 branches (-> 2184) from 19 files.\n" + "2019-01-17 23:05:29,698 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1549 branches from 17 files.\n" ] }, { @@ -664,7 +670,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:47:31,566 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1089 branches (-> 2333) from 13 files.\n" + "2019-01-17 23:05:49,879 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1231 branches from 6 files.\n" ] }, { @@ -678,7 +684,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:47:52,709 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1114 branches (-> 2207) from 13 files.\n" + "2019-01-17 23:05:55,146 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 305 branches from 5 files.\n" ] }, { @@ -692,7 +698,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:48:09,699 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1048 branches (-> 2381) from 7 files.\n" + "2019-01-17 23:06:05,985 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 796 branches from 3 files.\n" ] }, { @@ -706,7 +712,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:48:26,615 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 1144 branches (-> 2381) from 9 files.\n" + "2019-01-17 23:06:08,113 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 104 branches from 1 files.\n" ] }, { @@ -720,7 +726,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:48:54,066 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1193 branches (-> 2408) from 12 files.\n" + "2019-01-17 23:06:20,942 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 788 branches from 4 files.\n" ] }, { @@ -734,7 +740,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:49:24,333 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1153 branches (-> 2376) from 12 files.\n" + "2019-01-17 23:06:39,811 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1302 branches from 6 files.\n" ] }, { @@ -748,8 +754,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:50:01,888 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 00:50:02,262 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2012 branches (-> 2443) from 55 files.\n" + "2019-01-17 23:07:03,955 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:07:05,376 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2405 branches from 29 files.\n" ] }, { @@ -763,7 +769,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:50:42,130 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1976 branches (-> 3191) from 18 files.\n" + "2019-01-17 23:07:31,312 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:07:33,346 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2274 branches from 9 files.\n" ] }, { @@ -777,7 +784,8 @@ "name": "stderr", 
"output_type": "stream", "text": [ - "2018-12-25 00:51:21,707 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1971 branches (-> 3217) from 14 files.\n" + "2019-01-17 23:07:57,378 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:07:58,810 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2195 branches from 5 files.\n" ] }, { @@ -791,7 +799,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:51:54,448 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1685 branches (-> 3358) from 8 files.\n" + "2019-01-17 23:08:10,003 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1042 branches from 2 files.\n" ] }, { @@ -805,7 +813,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:52:40,484 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1748 branches (-> 2999) from 14 files.\n" + "2019-01-17 23:08:34,505 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:08:41,626 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2727 branches from 5 files.\n" ] }, { @@ -820,7 +829,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:53:26,301 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1694 branches (-> 2945) from 14 files.\n" + "2019-01-17 23:09:07,540 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:09:11,405 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2526 branches from 4 files.\n" ] }, { @@ -834,7 +844,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:54:08,039 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1857 branches (-> 3108) from 14 files.\n" + "2019-01-17 23:09:31,911 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 
branches.\n", + "2019-01-17 23:09:38,203 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2646 branches from 5 files.\n" ] }, { @@ -848,7 +859,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:54:48,489 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1941 branches (-> 3253) from 11 files.\n" + "2019-01-17 23:09:58,608 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:10:00,234 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2191 branches from 3 files.\n" ] }, { @@ -862,8 +874,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:55:27,064 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 00:55:27,387 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2026 branches (-> 3250) from 18 files.\n" + "2019-01-17 23:10:15,581 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1310 branches from 3 files.\n" ] }, { @@ -877,7 +888,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:55:47,192 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1023 branches (-> 3210) from 5 files.\n" + "2019-01-17 23:10:21,608 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 879 branches from 1 files.\n" ] }, { @@ -891,7 +902,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:56:38,645 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1926 branches (-> 3201) from 12 files.\n" + "2019-01-17 23:10:35,476 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1293 branches from 2 files.\n" ] }, { @@ -905,7 +916,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:57:31,425 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1696 branches (-> 3201) from 12 files.\n" + "2019-01-17 23:10:37,707 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 235 branches from 1 files.\n" ] }, { @@ -919,7 +930,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:58:23,935 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1950 branches (-> 3191) from 13 files.\n" + "2019-01-17 23:10:58,201 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:11:03,455 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2464 branches from 6 files.\n" ] }, { @@ -933,8 +945,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 00:59:13,408 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 00:59:13,667 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2018 branches (-> 3254) from 15 files.\n" + "2019-01-17 23:11:20,372 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1199 branches from 7 files.\n" ] }, { @@ -948,8 +959,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:00:03,088 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:00:04,250 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2052 branches (-> 3295) from 15 files.\n" + "2019-01-17 23:11:37,984 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1290 branches from 6 files.\n" ] }, { @@ -963,7 +973,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:00:44,053 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1794 branches (-> 3727) from 11 files.\n" + "2019-01-17 23:11:53,919 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 953 branches from 7 files.\n" ] }, { @@ -977,7 +987,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:01:12,743 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1076 branches (-> 
3589) from 11 files.\n" + "2019-01-17 23:12:03,207 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 375 branches from 6 files.\n" ] }, { @@ -991,7 +1001,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:01:37,812 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1148 branches (-> 3713) from 8 files.\n" + "2019-01-17 23:12:10,976 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 536 branches from 3 files.\n" ] }, { @@ -1005,7 +1015,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:01:51,650 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 803 branches (-> 3713) from 3 files.\n" + "2019-01-17 23:12:20,117 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 873 branches from 1 files.\n" ] }, { @@ -1019,7 +1029,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:02:12,247 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 910 branches (-> 3768) from 5 files.\n" + "2019-01-17 23:12:26,529 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 388 branches from 3 files.\n" ] }, { @@ -1033,7 +1043,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:02:34,062 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1008 branches (-> 3866) from 5 files.\n" + "2019-01-17 23:12:34,166 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 448 branches from 3 files.\n" ] }, { @@ -1047,7 +1057,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:02:42,225 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 336 branches (-> 3886) from 3 files.\n" + "2019-01-17 23:12:41,827 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 452 branches from 3 files.\n" ] }, { @@ -1061,7 +1071,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:02:50,092 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 392 branches (-> 3942) from 3 files.\n" + "2019-01-17 23:12:47,812 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 319 branches from 2 files.\n" ] }, { @@ -1076,7 +1086,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:03:26,950 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1128 branches (-> 3993) from 10 files.\n" + "2019-01-17 23:13:11,934 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1650 branches from 7 files.\n" ] }, { @@ -1090,8 +1100,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:04:07,508 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:04:27,833 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2804 branches (-> 4521) from 17 files.\n" + "2019-01-17 23:13:40,596 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1889 branches from 7 files.\n" ] }, { @@ -1105,8 +1114,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:05:09,496 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:05:40,036 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3754 branches (-> 4979) from 23 files.\n" + "2019-01-17 23:14:17,629 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:14:37,878 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3634 branches from 14 files.\n" ] }, { @@ -1120,7 +1129,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:05:40,504 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 5 branches (-> 4984) from 2 files.\n" + "2019-01-17 23:14:38,331 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 9 branches from 2 files.\n" ] }, { @@ -1134,8 +1143,7 @@ "name": "stderr", 
"output_type": "stream", "text": [ - "2018-12-25 01:06:18,512 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:06:34,779 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2917 branches (-> 5019) from 35 files.\n" + "2019-01-17 23:15:08,228 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1691 branches from 19 files.\n" ] }, { @@ -1149,8 +1157,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:07:10,061 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:07:21,423 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2627 branches (-> 5041) from 17 files.\n" + "2019-01-17 23:15:29,193 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1178 branches from 7 files.\n" ] }, { @@ -1164,8 +1171,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:07:59,017 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:08:06,460 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2540 branches (-> 5046) from 16 files.\n" + "2019-01-17 23:15:45,933 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 974 branches from 6 files.\n" ] }, { @@ -1179,8 +1185,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:08:39,989 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:08:50,183 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2549 branches (-> 5056) from 13 files.\n" + "2019-01-17 23:16:01,735 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 876 branches from 3 files.\n" ] }, { @@ -1194,8 +1199,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:09:27,227 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - 
"2018-12-25 01:09:35,580 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2554 branches (-> 5063) from 14 files.\n" + "2019-01-17 23:16:18,063 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 898 branches from 4 files.\n" ] }, { @@ -1209,8 +1213,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:10:09,005 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:10:22,770 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2694 branches (-> 5069) from 13 files.\n" + "2019-01-17 23:16:40,232 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1266 branches from 3 files.\n" ] }, { @@ -1224,7 +1227,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:10:41,091 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1076 branches (-> 5077) from 9 files.\n" + "2019-01-17 23:16:47,947 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 454 branches from 2 files.\n" ] }, { @@ -1238,8 +1241,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:11:14,750 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:11:25,035 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2520 branches (-> 5077) from 11 files.\n" + "2019-01-17 23:17:02,149 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 834 branches from 1 files.\n" ] }, { @@ -1253,8 +1255,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:11:58,002 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:12:27,114 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3661 branches (-> 5294) from 15 files.\n" + "2019-01-17 23:17:31,047 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:17:34,265 
INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2432 branches from 4 files.\n" ] }, { @@ -1268,8 +1270,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:13:24,944 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:13:32,992 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2457 branches (-> 3854) from 18 files.\n" + "2019-01-17 23:18:14,687 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:18:23,985 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2761 branches from 9 files.\n" ] }, { @@ -1283,7 +1285,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:13:39,213 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 14 branches (-> 3861) from 2 files.\n" + "2019-01-17 23:18:30,312 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 424 branches from 2 files.\n" ] }, { @@ -1297,7 +1299,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:14:09,642 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1481 branches (-> 3904) from 71 files.\n" + "2019-01-17 23:18:43,079 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1168 branches from 42 files.\n" ] }, { @@ -1311,7 +1313,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:14:30,998 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1359 branches (-> 3907) from 72 files.\n" + "2019-01-17 23:18:55,784 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1543 branches from 53 files.\n" ] }, { @@ -1325,7 +1327,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:14:44,148 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 876 branches (-> 3912) from 45 files.\n" + "2019-01-17 23:19:05,209 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 771 branches from 15 files.\n" ] }, { @@ -1339,8 +1341,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:15:24,172 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:15:30,833 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2325 branches (-> 3909) from 12 files.\n" + "2019-01-17 23:19:18,287 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 765 branches from 3 files.\n" ] }, { @@ -1354,7 +1355,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:15:42,406 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 8 branches (-> 3917) from 1 files.\n" + "2019-01-17 23:19:30,279 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 820 branches from 1 files.\n" ] }, { @@ -1368,8 +1369,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:16:28,158 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:16:48,675 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3168 branches (-> 4911) from 16 files.\n" + "2019-01-17 23:20:04,554 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:20:13,549 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2624 branches from 7 files.\n" ] }, { @@ -1383,7 +1384,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:16:54,919 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 304 branches (-> 4928) from 5 files.\n" + "2019-01-17 23:20:15,408 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 79 branches from 1 files.\n" ] }, { @@ -1397,7 +1398,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:17:05,732 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 762 branches (-> 
4940) from 4 files.\n" + "2019-01-17 23:20:17,041 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 54 branches from 1 files.\n" ] }, { @@ -1405,14 +1406,14 @@ "output_type": "stream", "text": [ "----- No.100 38fb4a4f1da53062cbaa881534f612cd78f8014b on main -----\n", - "------ Used time: 2798.011 -----\n" + "------ Used time: 1848.181 -----\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:17:21,147 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 771 branches (-> 4919) from 6 files.\n" + "2019-01-17 23:20:19,292 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 73 branches from 3 files.\n" ] }, { @@ -1426,7 +1427,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:17:22,051 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3 branches (-> 4922) from 2 files.\n" + "2019-01-17 23:20:20,304 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3 branches from 2 files.\n" ] }, { @@ -1440,8 +1441,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:18:01,105 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:18:33,001 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3464 branches (-> 4844) from 20 files.\n" + "2019-01-17 23:20:47,474 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1552 branches from 5 files.\n" ] }, { @@ -1455,8 +1455,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:19:17,270 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:19:38,440 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3175 branches (-> 4848) from 19 files.\n" + "2019-01-17 23:21:24,708 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", + "2019-01-17 23:21:31,032 INFO 
[persper.analytics.lsp_graph_server.callgraph.manager] Added 2806 branches from 8 files.\n" ] }, { @@ -1470,8 +1470,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:20:23,790 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:20:32,894 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 2556 branches (-> 4777) from 12 files.\n" + "2019-01-17 23:21:56,286 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1876 branches from 4 files.\n" ] }, { @@ -1485,7 +1484,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:20:47,888 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 912 branches (-> 4872) from 6 files.\n" + "2019-01-17 23:22:02,520 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 362 branches from 4 files.\n" ] }, { @@ -1499,8 +1498,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:21:25,180 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:21:45,689 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3046 branches (-> 4943) from 16 files.\n" + "2019-01-17 23:22:13,815 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 898 branches from 4 files.\n" ] }, { @@ -1514,7 +1512,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:21:57,642 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 944 branches (-> 5004) from 3 files.\n" + "2019-01-17 23:22:17,001 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 299 branches from 1 files.\n" ] }, { @@ -1528,8 +1526,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2018-12-25 01:22:33,337 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Already added 2000 branches.\n", - "2018-12-25 01:22:54,418 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 3121 branches 
(-> 5026) from 17 files.\n" + "2019-01-17 23:22:42,581 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 1538 branches from 4 files.\n" ] } ], @@ -1560,8 +1557,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Nodes: 738\n", - "Edges: 3505\n" + "Nodes: 861\n", + "Edges: 3388\n" ] } ], @@ -1582,22 +1579,42 @@ "output_type": "stream", "text": [ "namespace clang {} | namespace clang {}\n", - "namespace clang {} | int clang::CodeCompleteResults::get_usr() const\n", - "namespace clang {} | clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column)\n", - "namespace clang {} | int clang::CodeCompleteResults::get(unsigned int i) const\n", - "namespace clang {} | unsigned int clang::CodeCompleteResults::size() const\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column)\n", + "namespace clang {} | std::string clang::SourceLocation::get_path()\n", + "namespace clang {} | clang::Offset clang::SourceLocation::get_offset()\n", + "namespace clang {} | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", + "namespace clang {} | clang::Index::Index(int excludeDeclarationsFromPCH, int displayDiagnostics)\n", "namespace clang {} | clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic)\n", "namespace clang {} | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity)\n", + "namespace clang {} | int clang::SourceRange::get_offsets()\n", + "namespace clang {} | int clang::CompileCommand::get_command()\n", + "namespace clang {} | int clang::CompileCommand::get_command_as_args()\n", + "namespace clang {} | 
clang::CompileCommands::CompileCommands(const int &filename, clang::CompilationDatabase &db)\n", + "namespace clang {} | int clang::CompileCommands::get_commands()\n", + "namespace clang {} | clang::CompletionString::CompletionString(const int &cx_completion_sting)\n", + "namespace clang {} | std::string clang::Cursor::get_usr() const\n", + "namespace clang {} | clang::CompilationDatabase::CompilationDatabase(const int &project_path)\n", + "namespace clang {} | clang::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | static unsigned int clang::TranslationUnit::DefaultFlags()\n", + "namespace clang {} | int clang::TranslationUnit::ReparseTranslationUnit(const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | void clang::TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)\n", + "namespace clang {} | int clang::TranslationUnit::get_code_completions(const std::string &buffer, unsigned int line_number, unsigned int column)\n", + "namespace clang {} | int clang::TranslationUnit::get_diagnostics()\n", + "namespace clang {} | int clang::TranslationUnit::get_tokens(unsigned int start_offset, unsigned int end_offset)\n", + "namespace clang {} | int clang::TranslationUnit::get_tokens(unsigned int start_line, unsigned int start_column, unsigned int end_line, unsigned int end_column)\n", + "namespace clang {} | int clang::TranslationUnit::get_cursor(std::string path, unsigned int offset)\n", + "namespace clang {} | int clang::TranslationUnit::get_cursor(std::string path, unsigned int line, unsigned int column)\n", "namespace clang {} | int clang::Token::get_spelling() const\n", "namespace clang {} | clang::Cursor::Cursor()\n", "namespace clang {} | clang::Cursor::Cursor::Cursor(const int &other)\n", "namespace clang {} | int Cursor::get_kind() 
const\n", - "namespace clang {} | int Cursor::get_type() const\n", + "namespace clang {} | clang::Type clang::Cursor::get_type() const\n", "namespace clang {} | clang::SourceLocation clang::Cursor::get_source_location() const\n", "namespace clang {} | int Cursor::get_source_range() const\n", + "namespace clang {} | std::string clang::Type::get_spelling() const\n", "namespace clang {} | std::string clang::Cursor::get_display_name() const\n", - "namespace clang {} | std::string clang::Cursor::get_usr() const\n", - "namespace clang {} | bool Cursor::is_definition() const\n", + "namespace clang {} | bool clang::Cursor::is_definition() const\n", "namespace clang {} | clang::Cursor clang::Cursor::get_referenced() const\n", "namespace clang {} | clang::Cursor clang::Cursor::get_canonical() const\n", "namespace clang {} | clang::Cursor clang::Cursor::get_definition() const\n", @@ -1605,21 +1622,8 @@ "namespace clang {} | int Cursor::get_arguments() const\n", "namespace clang {} | bool clang::Cursor::is_valid_kind() const\n", "namespace clang {} | std::string clang::Cursor::get_type_description() const\n", - "namespace clang {} | std::string Cursor::get_comments() const\n", - "namespace clang {} | struct TypeDef\n", - "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", - "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column)\n", - "namespace clang {} | std::string clang::SourceLocation::get_path()\n", - "namespace clang {} | clang::Offset clang::SourceLocation::get_offset()\n", - "namespace clang {} | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", - "namespace clang {} | clang::CompletionString::CompletionString(const int &cx_completion_sting)\n", - "namespace clang {} | int clang::Token::get_source_range() 
const\n", - "namespace clang {} | clang::Token::Kind clang::Token::get_kind() const\n", - "namespace clang {} | clang::SourceLocation clang::Token::get_source_location() const\n", - "namespace clang {} | bool clang::Token::is_identifier() const\n", - "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, unsigned int flags)\n", - "namespace clang {} | static unsigned int clang::TranslationUnit::DefaultFlags()\n", - "namespace clang {} | int clang::TranslationUnit::ReparseTranslationUnit(const std::string &buffer, unsigned int flags)\n", + "namespace clang {} | std::string clang::Cursor::get_comments() const\n", + "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", "namespace clang {} | void TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)\n", "namespace clang {} | int TranslationUnit::get_code_completions(const std::string &buffer, unsigned int line_number, unsigned int column)\n", "namespace clang {} | int TranslationUnit::get_diagnostics()\n", @@ -1628,59 +1632,52 @@ "namespace clang {} | int TranslationUnit::document_cursor() const\n", "namespace clang {} | int TranslationUnit::get_cursor(std::string path, unsigned int offset)\n", "namespace clang {} | int TranslationUnit::get_cursor(std::string path, unsigned int line, unsigned int column)\n", - "namespace clang {} | std::string clang::ToString(int cx_string)\n", + "namespace clang {} | int clang::CodeCompleteResults::get_usr() const\n", "namespace clang {} | clang::Type::Type()\n", - "namespace clang {} | clang::Type::Type::Type(const int &other)\n", - "namespace clang {} | std::string clang::Type::get_usr() const\n", - "namespace clang {} | std::string clang::Type::get_spelling() 
const\n", "namespace clang {} | clang::Type clang::Type::get_return_type() const\n", "namespace clang {} | int Type::get_arguments() const\n", - "namespace clang {} | clang::Type clang::Cursor::get_type() const\n", - "namespace clang {} | bool clang::Cursor::is_definition() const\n", - "namespace clang {} | std::string clang::Cursor::get_comments() const\n", + "namespace clang {} | std::string clang::Type::get_usr() const\n", "namespace clang {} | std::string clang::Cursor::ToString() const\n", "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(const int &cx_location)\n", - "namespace clang {} | std::string SourceLocation::ToString() const\n", + "namespace clang {} | std::string clang::SourceLocation::ToString() const\n", + "namespace clang {} | clang::Type::Type::Type(const int &other)\n", "namespace clang {} | std::string clang::Cursor::evaluate() const\n", "namespace clang {} | clang::Type clang::Type::strip_qualifiers() const\n", "namespace clang {} | bool clang::Type::is_fundamental() const\n", "namespace clang {} | int Type::get_template_arguments() const\n", - "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation() noexcept\n", - "namespace clang {} | std::string clang::SourceLocation::ToString() const\n", "namespace clang {} | clang::SourceLocation::SourceLocation()\n", "namespace clang {} | class Foo\n", "namespace clang {} | clang::Cursor clang::Cursor::template_specialization_to_template_definition() const\n", + "namespace clang {} | struct FindChildOfKindParam {}\n", "namespace clang {} | clang::CompileCommand::CompileCommand::CompileCommand(const int &command)\n", "namespace clang {} | int CompileCommand::get_command() const\n", "namespace clang {} | int CompileCommand::get_command_as_args() const\n", - "namespace clang {} | clang::TranslationUnit::TranslationUnit::TranslationUnit(int &index, const std::string &file_path, const int &command_line_args, const std::string &buffer, unsigned int flags)\n", - 
"namespace clang {} | struct FindChildOfKindParam {}\n", "namespace clang {} | clang::CompileCommands::CompileCommands(const clang::CompilationDatabase &db)\n", "namespace clang {} | struct IndexParam {}\n", - "namespace clang {} | clang::CompileCommands::~CompileCommands() noexcept\n", - "namespace clang {} | int clang::CompileCommands::get_commands()\n", - "namespace clang {} | struct CachedIndexedFile {}\n", - "namespace clang {} | struct Location {}\n", + "namespace clang {} | explicit clang::CompilationDatabase::CompilationDatabase(const std::string &project_path)\n", + "namespace clang {} | int Type::get_declaration() const\n", "namespace clang {} | struct SymbolIdx {}\n", "namespace clang {} | struct QueryableLocation {}\n", - "namespace clang {} | int Type::get_declaration() const\n", - "int clang::CodeCompleteResults::get_usr() const | namespace clang {}\n", - "int clang::CodeCompleteResults::get_usr() const | std::string clang::ToString(int cx_string)\n", - "int clang::CodeCompleteResults::get_usr() const | std::string SourceLocation::ToString() const\n", - "int clang::CodeCompleteResults::get_usr() const | std::string clang::SourceLocation::ToString() const\n", - "int clang::CodeCompleteResults::get_usr() const | std::string ToString()\n", - "int clang::CodeCompleteResults::get_usr() const | std::string IndexedFile::ToString()\n", - "int clang::CodeCompleteResults::get_usr() const | std::string ToString(const int &document)\n", - "int clang::CodeCompleteResults::get_usr() const | int ToString(const int &document)\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | namespace clang {}\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | unsigned int clang::CodeCompleteResults::size() const\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, 
unsigned int column) | std::string clang::ToString(int cx_string)\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct Database {}\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct QueryDatabase {}\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct QueryableDatabase {}\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct Location {}\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | struct QueryableLocation {}\n", - "clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column) | std::string ToString(const int &document)\n", + "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset) | namespace clang {}\n", + "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column) | namespace clang {}\n", + "clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column) | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", + "std::string clang::SourceLocation::get_path() | namespace clang {}\n", + "std::string clang::SourceLocation::get_path() | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", + "void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset) | namespace clang {}\n", + "clang::Offset 
clang::SourceLocation::get_offset() | namespace clang {}\n", + "clang::Offset clang::SourceLocation::get_offset() | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", + "int clang::CompileCommand::get_command() | namespace clang {}\n", + "int clang::CompileCommand::get_command_as_args() | namespace clang {}\n", + "int clang::CompileCommand::get_command_as_args() | int clang::CompileCommand::get_command()\n", + "clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic) | namespace clang {}\n", + "clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic) | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity)\n", + "clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic) | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column)\n", + "clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic) | std::string clang::SourceLocation::get_path()\n", + "clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic) | clang::Offset clang::SourceLocation::get_offset()\n", + "clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic) | int clang::SourceRange::get_offsets()\n", + "static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity) | namespace clang {}\n", + "clang::Index::Index(int excludeDeclarationsFromPCH, int displayDiagnostics) | namespace clang {}\n", + "clang::Index::~Index() noexcept | namespace clang {}\n", "...\n" ] } @@ -1695,7 +1692,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -1706,7 +1703,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -1714,15 +1711,15 @@ "output_type": "stream", "text": [ "namespace clang {} | namespace clang {}\n", - "namespace clang {} | int 
clang::CodeCompleteResults::get_usr() const\n", - "namespace clang {} | clang::CodeCompleteResults::CodeCompleteResults(int &cx_tu, const int &buffer, unsigned int line_num, unsigned int column)\n", - "namespace clang {} | int clang::CodeCompleteResults::get(unsigned int i) const\n", - "namespace clang {} | unsigned int clang::CodeCompleteResults::size() const\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int offset)\n", + "namespace clang {} | clang::SourceLocation::SourceLocation::SourceLocation(int &tu, const std::string &filepath, unsigned int line, unsigned int column)\n", + "namespace clang {} | std::string clang::SourceLocation::get_path()\n", + "namespace clang {} | clang::Offset clang::SourceLocation::get_offset()\n", + "namespace clang {} | void clang::SourceLocation::get_data(std::string *path, unsigned int *line, unsigned int *column, unsigned int *offset)\n", + "namespace clang {} | clang::Index::Index(int excludeDeclarationsFromPCH, int displayDiagnostics)\n", "namespace clang {} | clang::Diagnostic::Diagnostic(int &cx_tu, int &cx_diagnostic)\n", "namespace clang {} | static const std::string clang::Diagnostic::get_severity_spelling(unsigned int severity)\n", - "namespace clang {} | int clang::Token::get_spelling() const\n", - "namespace clang {} | clang::Cursor::Cursor()\n", - "namespace clang {} | clang::Cursor::Cursor::Cursor(const int &other)\n", + "namespace clang {} | int clang::SourceRange::get_offsets()\n", "...\n" ] } @@ -1736,10 +1733,139 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('jacobdufault@gmail.com', 0.9810053091032119)\n", + "('jdufault@google.com', 0.012631085775342622)\n" + ] + } + ], + "source": [ + "for t in sorted(G.developer_devranks(0.85).items(), key=lambda t:t[1], 
reverse=True):\n", + " print(t)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('namespace clang {}', 0.10416510691561254)\n", + "('struct IndexedFile {}', 0.07087220405735957)\n", + "('struct Location {}', 0.05555621850983644)\n", + "('namespace language_server_api {}', 0.037464685336337684)\n", + "('void indexDeclaration(int client_data, const int *decl)', 0.03472520932390782)\n", + "('struct FuncDefDefinitionData {}', 0.028895424093886634)\n", + "('void indexEntityReference(int client_data, const int *ref)', 0.02833920522734296)\n", + "('struct TypeDefDefinitionData {}', 0.027383332592175774)\n", + "('struct ParsingDatabase {}', 0.022854110362300492)\n", + "('std::string ToString(const int &document)', 0.02239638361525142)\n", + "('struct VarDefDefinitionData {}', 0.01947827810897668)\n", + "('int main(int argc, char **argv)', 0.019008147207863067)\n", + "('struct Task {}', 0.01884614618353126)\n", + "('struct IdCache {}', 0.015987407624868322)\n", + "('struct IpcMessage_DocumentSymbolsRequest {}', 0.013928011009341164)\n", + "('struct QueryableDatabase {}', 0.012919324401159222)\n", + "('void DiffDocuments(int &expected, int &actual)', 0.012611768871416688)\n", + "('struct IndexUpdate {}', 0.012378546855131626)\n", + "('struct FileDb {}', 0.01196728115668281)\n", + "('struct Ref {}', 0.01193202317742757)\n", + "('struct TypeDef {}', 0.011067953367303708)\n", + "('struct IndexedFuncDef', 0.010908566394791454)\n", + "('struct QueryableEntry {}', 0.010537160251545136)\n", + "('int split_string(const std::string &str, const std::string &delimiter)', 0.010378125173983973)\n", + "('struct IndexParam {}', 0.008533911695897674)\n", + "('std::string clang::Cursor::get_type_description() const', 0.008457797123827646)\n", + "('struct SymbolIdx {}', 0.008086007774187281)\n", + "('constexpr T &std::experimental::optional::value() const', 0.0076216112589732)\n", + 
"('struct IndexedTypeDef', 0.0071560895156070016)\n", + "('struct IndexedVarDef', 0.007133033715821756)\n", + "('int Location(bool interesting, FileId file_id, uint32_t line, uint32_t column)', 0.007066448628771181)\n", + "('enum class SymbolKind : int {}', 0.006912848701970875)\n", + "('ParsingDatabase Parse(std::string filename)', 0.0067913702697211965)\n", + "('struct BitFieldArray {}', 0.006366912671229168)\n", + "('struct Id {}', 0.006269151986432736)\n", + "('struct VarDef {}', 0.005896723758458707)\n", + "('std::string clang::Cursor::evaluate() const', 0.005549030917220253)\n", + "('struct BitFieldMember {}', 0.005470120861961143)\n", + "('namespace boost {}', 0.0053367123489663605)\n", + "('struct CachedIndexedFile {}', 0.005335658634452737)\n", + "('void HandleFunc(ParsingDatabase *db, NamespaceStack *ns, int func, int declaring_type)', 0.0050395202572343775)\n", + "('struct TypeDef', 0.004942921362123896)\n", + "('struct FindChildOfKindParam {}', 0.004811707018465551)\n", + "('void QueryDbMainLoop(int *ipc, int *db)', 0.004635264126963808)\n", + "('void writer()', 0.004239041393394854)\n", + "('struct IpcDirectionalChannel {}', 0.004222606591724942)\n", + "('std::string IndexedFile::ToString()', 0.003914891670763644)\n", + "('void reader()', 0.0037276335662578255)\n", + "('struct IdMap', 0.003698909960007504)\n", + "('struct IdMap {}', 0.0036803694125616855)\n", + "('int IpcDirectionalChannel::TakeMessages()', 0.0036664616786350667)\n", + "('int VisitFuncDefinition(int cursor, int parent, FuncDefinitionParam *param)', 0.0034997936428615207)\n", + "('class Foo', 0.00343298027102756)\n", + "('int ResolveDeclToType(ParsingDatabase *db, int decl_cursor, bool is_interesting, const int *semantic_container, const int *lexical_container)', 0.0033863963771359707)\n", + "('IndexedFile Parse(std::string filename, int args, bool dump_ast)', 0.003382847159072873)\n", + "('struct IndexedTypeDef {}', 0.003350706789987754)\n", + "('int VisitFile(int cursor, int parent, 
FileParam *param)', 0.0032459392067467193)\n", + "('bool IsFunction(int kind)', 0.003175717188227563)\n", + "('void clang::TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)', 0.0030653920792708656)\n", + "('struct FileDef {}', 0.0029827759084660936)\n", + "('struct FuncDefinitionParam {}', 0.002758694142937999)\n", + "('void VisitDeclForTypeUsageVisitorHandler(int cursor, VisitDeclForTypeUsageParam *param)', 0.0027553882048806048)\n", + "('struct IndexedFuncDef {}', 0.002528391159191035)\n", + "('int clang::TranslationUnit::ReparseTranslationUnit(const std::string &buffer, unsigned int flags)', 0.002438888588616966)\n", + "('IndexUpdate::IndexUpdate(IndexedFile &previous, IndexedFile ¤t)', 0.002352122087390797)\n", + "('void WriteToFile(const std::string &filename, const std::string &content)', 0.0022592441357603276)\n", + "('bool clang::Cursor::is_valid_kind() const', 0.002171323403042084)\n", + "('int VisitClassDecl(int cursor, int parent, ClassDeclParam *param)', 0.0021582765039166282)\n", + "('struct QueryableFuncDef {}', 0.0020856582400619085)\n", + "('static unsigned int clang::TranslationUnit::DefaultFlags()', 0.002080886593957595)\n", + "('struct JsonMessage {}', 0.0020459402128250007)\n", + "('int AddDeclUsages(IndexedFile *db, int decl_cursor, bool is_interesting, const int *semantic_container, const int *lexical_container)', 0.00201270564335991)\n", + "('struct IpcDirectionalChannel::MessageBuffer', 0.0019993007028381057)\n", + "('int clang::TranslationUnit::get_tokens(unsigned int start_line, unsigned int start_column, unsigned int end_line, unsigned int end_column)', 0.001991386095292752)\n", + "('int clang::TranslationUnit::get_diagnostics()', 0.001969010970626541)\n", + "('std::string ToString()', 0.0019628876731040126)\n", + "('int Cursor::get_arguments() const', 0.0019476279951740321)\n", + "('struct ClassDeclParam {}', 0.0018981883361736543)\n", + "('int 
clang::TranslationUnit::get_tokens(unsigned int start_offset, unsigned int end_offset)', 0.0018616870295877258)\n", + "('struct BaseIpcMessage {}', 0.001852010555893617)\n", + "('struct VisitDeclForTypeUsageParam {}', 0.0018259155343039947)\n", + "('int VisitDeclForTypeUsageVisitor(int cursor, int parent, VisitDeclForTypeUsageParam *param)', 0.0017714792188222434)\n", + "('void HandleVarDecl(ParsingDatabase *db, NamespaceStack *ns, int var, int declaring_type, int func_id, bool declare_variable)', 0.001756397282995158)\n", + "('int VisitUsing(int cursor, int parent, UsingParam *param)', 0.0017391009245347117)\n", + "('struct UsrToIdResolver {}', 0.0017315288570247623)\n", + "('void TranslationUnit::parse(int &index, const std::string &file_path, const int &command_line_args, const int &buffers, unsigned int flags)', 0.0017228845992982237)\n", + "('struct BaseIpcMessage : BaseIpcMessageElided {}', 0.0017163981552008037)\n", + "('VarId IndexedFile::ToVarId(const std::string &usr)', 0.0017095020484755572)\n", + "('void Write(const int &strs)', 0.0016931594709985746)\n", + "('enum class Command : int {}', 0.001685452539998441)\n", + "('int mai2n(int argc, char **argv)', 0.0016762636027911257)\n", + "('std::string ParsingDatabase::ToString()', 0.0016742361305578815)\n", + "('IndexUpdate ComputeDiff(IdMap *id_map, IndexedFile &previous, IndexedFile ¤t)', 0.0016399275787473788)\n", + "('struct IndexedFileDb {}', 0.0016140476193728611)\n", + "('struct QueryableVarDef {}', 0.0016131194704164442)\n", + "('struct BaseIpcMessageElided {}', 0.00160707872062705)\n", + "('int main2(int argc, char **argv)', 0.0015949425203396002)\n", + "('struct FuncDef {}', 0.0015587623184678523)\n", + "('struct IndexedVarDef {}', 0.0015559247845952614)\n", + "('void LanguageServerMain(std::string process_name)', 0.001552187811482307)\n" + ] + } + ], + "source": [ + "for t in itertools.islice(sorted(G.function_devranks(0.85).items(), key=lambda t:t[1], reverse=True), 100):\n", + " print(t)" + ] } 
], "metadata": { diff --git a/notebooks/lsp-ccls.ipynb b/notebooks/lsp-ccls.ipynb index 2b7c9ab4494..79c47c74f5b 100644 --- a/notebooks/lsp-ccls.ipynb +++ b/notebooks/lsp-ccls.ipynb @@ -25,14 +25,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\n" + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-26 00:34:17,722 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Exceptions.h.\n" + "2019-01-17 23:45:01,005 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Exceptions.h.\n" ] }, { @@ -42,132 +42,124 @@ "----- Overview ------\n", "# of commits on master: 4\n", "# of commits on branch: 0\n", - "----- No.1 37d91e19997dcfac11c7213126761920ad57f0d5 on main -----\n" + "----- No.1 4965d8edcb63cab6e544c1ecd19454f37d9bb0d3 on main -----\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-26 00:34:17,790 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp.\n", - "2018-12-26 00:34:17,859 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.h.\n", - "2018-12-26 00:34:17,934 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TypeTraits.h.\n", - "2018-12-26 00:34:18,012 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp.\n", - "2018-12-26 00:34:18,093 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h.\n", - "2018-12-26 00:34:18,190 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", - "2018-12-26 00:34:18,284 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.cpp.\n", - "2018-12-26 00:34:18,370 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.h.\n", - "2018-12-26 00:34:18,376 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 0 branches by 9 files.\n", - "2018-12-26 00:34:18,378 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", - "2018-12-26 00:34:18,380 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", - "2018-12-26 00:34:21,427 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", - "2018-12-26 00:34:21,429 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.h\n", - "2018-12-26 00:34:21,577 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", - "2018-12-26 00:34:21,579 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", - "2018-12-26 00:34:22,468 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 80 branches.\n", - "2018-12-26 00:34:22,470 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", - "2018-12-26 00:34:22,809 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 20 branches.\n", - "2018-12-26 00:34:22,810 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: 
C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TypeTraits.h\n", - "2018-12-26 00:34:22,896 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 1 branches.\n", - "2018-12-26 00:34:22,898 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.h\n", - "2018-12-26 00:34:23,515 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 39 branches.\n", - "2018-12-26 00:34:23,516 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", - "2018-12-26 00:34:23,667 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 2 branches.\n", - "2018-12-26 00:34:23,668 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\stdafx.cpp\n", - "2018-12-26 00:34:23,775 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", - "2018-12-26 00:34:23,777 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Exceptions.h\n", - "2018-12-26 00:34:24,150 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", - "2018-12-26 00:34:24,151 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 130 branches (-> 130) from 9 files.\n", - "2018-12-26 00:34:24,151 INFO [persper.analytics.lsp_graph_server] End commit: 37d91e19997dcfac11c7213126761920ad57f0d5\n" + "2019-01-17 23:45:01,062 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp.\n", + "2019-01-17 23:45:01,118 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.h.\n", + "2019-01-17 23:45:01,174 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TypeTraits.h.\n", + "2019-01-17 23:45:01,230 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp.\n", + "2019-01-17 23:45:01,284 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h.\n", + "2019-01-17 23:45:01,339 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", + "2019-01-17 23:45:01,396 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.cpp.\n", + "2019-01-17 23:45:01,452 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.h.\n", + "2019-01-17 23:45:01,455 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", + "2019-01-17 23:45:01,457 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h\n", + "2019-01-17 23:45:03,434 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 73 branches.\n", + "2019-01-17 23:45:03,435 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Exceptions.h\n", + "2019-01-17 23:45:03,696 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", + "2019-01-17 23:45:03,697 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TypeTraits.h\n", + "2019-01-17 23:45:03,770 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 1 branches.\n", + "2019-01-17 23:45:03,771 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", + "2019-01-17 23:45:05,443 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", + "2019-01-17 23:45:05,444 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.h\n", + "2019-01-17 23:45:07,225 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 40 branches.\n", + "2019-01-17 23:45:07,226 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", + "2019-01-17 23:45:08,342 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2019-01-17 23:45:08,343 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.h\n", + "2019-01-17 23:45:09,076 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2019-01-17 23:45:09,077 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp\n", + "2019-01-17 23:45:10,579 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 163 branches.\n", + "2019-01-17 23:45:10,580 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.cpp\n", + "2019-01-17 23:45:11,346 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2019-01-17 23:45:11,347 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 387 branches from 9 files.\n", + "2019-01-17 23:45:11,348 INFO [persper.analytics.lsp_graph_server] End commit: 4965d8edcb63cab6e544c1ecd19454f37d9bb0d3\n", + "2019-01-17 23:45:11,511 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility-1.cpp.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "----- No.2 1745919965e74be2ae6549d0db90eaa75af1b21c on main -----\n" + "----- No.2 12a65d92071e8ab32890bc0f69697b2efcb013f8 on main -----\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-26 00:34:24,382 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility-1.cpp.\n", - "2018-12-26 00:34:24,513 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", - "2018-12-26 00:34:24,515 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 77 branches by 5 files.\n", - "2018-12-26 00:34:24,516 INFO [persper.analytics.lsp_graph_server] Invalidated 3 files, affected 5 files.\n", - "2018-12-26 00:34:24,517 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", - "2018-12-26 00:34:24,519 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", - "2018-12-26 00:34:26,972 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", - "2018-12-26 00:34:26,974 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", - "2018-12-26 00:34:27,168 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 12 branches.\n", - "2018-12-26 00:34:27,171 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", - "2018-12-26 00:34:28,186 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 80 branches.\n", - "2018-12-26 
00:34:28,189 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility-1.cpp\n", - "2018-12-26 00:34:28,514 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2018-12-26 00:34:28,515 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 144 branches (-> 197) from 4 files.\n", - "2018-12-26 00:34:28,516 INFO [persper.analytics.lsp_graph_server] End commit: 1745919965e74be2ae6549d0db90eaa75af1b21c\n" + "2019-01-17 23:45:11,620 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", + "2019-01-17 23:45:11,622 INFO [persper.analytics.lsp_graph_server] Invalidated 3 files, affected 3 files.\n", + "2019-01-17 23:45:11,623 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", + "2019-01-17 23:45:11,625 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility-1.cpp\n", + "2019-01-17 23:45:12,790 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2019-01-17 23:45:12,791 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", + "2019-01-17 23:45:13,805 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 12 branches.\n", + "2019-01-17 23:45:13,807 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 89 branches from 2 files.\n", + "2019-01-17 23:45:13,807 INFO [persper.analytics.lsp_graph_server] End commit: 12a65d92071e8ab32890bc0f69697b2efcb013f8\n", + "2019-01-17 23:45:13,984 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp.\n" ] }, { "name": "stdout", 
"output_type": "stream", "text": [ - "----- No.3 1cb351c5c12916338c73aba2a2e4f0384ff06b07 on main -----\n" + "----- No.3 4928d2ec0ad82221b61b30f3ae2e1cc4c61a3ea0 on main -----\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-26 00:34:28,854 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp.\n", - "2018-12-26 00:34:29,091 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp.\n", - "2018-12-26 00:34:29,287 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h.\n", - "2018-12-26 00:34:29,476 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", - "2018-12-26 00:34:29,481 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 144 branches by 5 files.\n", - "2018-12-26 00:34:29,483 INFO [persper.analytics.lsp_graph_server] Invalidated 5 files, affected 5 files.\n", - "2018-12-26 00:34:29,485 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility-1.cpp\n", - "2018-12-26 00:34:29,487 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", - "2018-12-26 00:34:32,527 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", - "2018-12-26 00:34:32,529 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", - "2018-12-26 00:34:33,509 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 162 branches.\n", - "2018-12-26 00:34:33,511 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: 
C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", - "2018-12-26 00:34:33,844 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2018-12-26 00:34:33,845 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", - "2018-12-26 00:34:33,996 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", - "2018-12-26 00:34:33,997 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 273 branches (-> 326) from 4 files.\n", - "2018-12-26 00:34:33,998 INFO [persper.analytics.lsp_graph_server] End commit: 1cb351c5c12916338c73aba2a2e4f0384ff06b07\n" + "2019-01-17 23:45:14,090 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp.\n", + "2019-01-17 23:45:14,196 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h.\n", + "2019-01-17 23:45:14,300 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", + "2019-01-17 23:45:14,302 INFO [persper.analytics.lsp_graph_server] Invalidated 5 files, affected 5 files.\n", + "2019-01-17 23:45:14,303 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility-1.cpp\n", + "2019-01-17 23:45:14,304 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h\n", + "2019-01-17 23:45:15,886 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 78 branches.\n", + "2019-01-17 23:45:15,887 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", + "2019-01-17 23:45:16,872 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2019-01-17 23:45:16,874 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp\n", + "2019-01-17 23:45:18,407 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 163 branches.\n", + "2019-01-17 23:45:18,409 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", + "2019-01-17 23:45:19,358 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", + "2019-01-17 23:45:19,359 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 324 branches from 4 files.\n", + "2019-01-17 23:45:19,359 INFO [persper.analytics.lsp_graph_server] End commit: 4928d2ec0ad82221b61b30f3ae2e1cc4c61a3ea0\n", + "2019-01-17 23:45:19,538 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "----- No.4 dc00db06e9013dc4b92c199887f081049e3a4276 on main -----\n" + "----- No.4 639934cdc7499854e22df79835240ac786498300 on main -----\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2018-12-26 00:34:34,232 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp.\n", - "2018-12-26 00:34:34,397 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp.\n", - "2018-12-26 00:34:34,562 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h.\n", - "2018-12-26 00:34:34,728 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified 
C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp.\n", - "2018-12-26 00:34:34,733 INFO [persper.analytics.lsp_graph_server.callgraph] Removed 273 branches by 4 files.\n", - "2018-12-26 00:34:34,735 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 4 files.\n", - "2018-12-26 00:34:34,737 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.h\n", - "2018-12-26 00:34:37,262 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 81 branches.\n", - "2018-12-26 00:34:37,263 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\TextFileParsers.cpp\n", - "2018-12-26 00:34:37,971 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 162 branches.\n", - "2018-12-26 00:34:37,972 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\Utility.cpp\n", - "2018-12-26 00:34:38,250 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2018-12-26 00:34:38,251 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpxhlrhwon\\main.cpp\n", - "2018-12-26 00:34:38,399 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 3 branches.\n", - "2018-12-26 00:34:38,400 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 270 branches (-> 323) from 4 files.\n", - "2018-12-26 00:34:38,401 INFO [persper.analytics.lsp_graph_server] End commit: dc00db06e9013dc4b92c199887f081049e3a4276\n", - "2018-12-26 00:34:38,408 INFO [persper.analytics.lsp_graph_server] Shutting down language server...\n", - "2018-12-26 00:35:41,011 INFO [persper.analytics.lsp_graph_server] Language server 24988 exited with code: 3221225786.\n" + "2019-01-17 23:45:19,647 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp.\n", + "2019-01-17 23:45:19,758 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h.\n", + "2019-01-17 23:45:19,869 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", + "2019-01-17 23:45:19,871 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 4 files.\n", + "2019-01-17 23:45:19,874 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp\n", + "2019-01-17 23:45:22,915 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 163 branches.\n", + "2019-01-17 23:45:22,916 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h\n", + "2019-01-17 23:45:24,290 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 87 branches.\n", + "2019-01-17 23:45:24,291 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", + "2019-01-17 23:45:24,503 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", + "2019-01-17 23:45:24,505 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", + "2019-01-17 23:45:25,399 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 3 branches.\n", + "2019-01-17 23:45:25,400 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 330 branches from 4 files.\n", + "2019-01-17 23:45:25,401 INFO [persper.analytics.lsp_graph_server] End commit: 639934cdc7499854e22df79835240ac786498300\n", + "2019-01-17 23:45:25,402 INFO 
[persper.analytics.lsp_graph_server] Shutting down language server...\n", + "2019-01-17 23:45:35,405 WARNING [persper.analytics.lsp_graph_server] Killed language server 7904.\n" ] } ], @@ -198,8 +190,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Nodes: 41\n", - "Edges: 71\n" + "Nodes: 56\n", + "Edges: 58\n" ] } ], @@ -221,32 +213,24 @@ "text": [ "_RangeToEnumerable RangeToEnumerable(const std::pair range) | class _RangeToEnumerable {}\n", "template<> struct is_flags : std::true_type {} | enum class StringComparison : int {}\n", - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "bool Equal(const int &lhs, const int &rhs, int comparision) | enum class StringComparison : int {}\n", - "bool Confirm(const std::string &prompt) | bool Confirm(const int &prompt)\n", - "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", - "bool Confirm(const int &prompt) | class RowReader {}\n", "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const std::type_index &type) | namespace std {}\n", "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", - "void ReportException(const std::exception &ex, int level) | void ReportException(const int &ex, int level)\n", - "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", - "int &operator>>(int &reader, int 
&rhs) | class RowReader {}\n", - "int &operator>>(int &reader, long &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, long &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, float &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, float &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, double &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, double &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, bool &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, bool &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, bool &rhs) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", - "int &operator>>(int &reader, bool &rhs) | enum class StringComparison : int {}\n", - "int &operator>>(int &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", + "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", + "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", + "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", + "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", + "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", + "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", + "int main(int argc, char *argv[]) | double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const\n", + "int main(int argc, char *argv[]) | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "int main(int argc, char *argv[]) | void 
ReportException(const std::exception &ex, int level)\n", + "class ConfigurationParser {} | namespace std {}\n", "RowReader &operator>>(RowReader &reader, std::string &rhs) | class RowReader {}\n", + "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, int &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", "RowReader &operator>>(RowReader &reader, long &rhs) | class RowReader {}\n", @@ -263,13 +247,8 @@ "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | enum class StringComparison : int {}\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", - "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", - "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", - "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", - "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", - "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", - "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", "void ConfigurationParser::Load(std::istream &inputStream) | class ConfigurationParser {}\n", "void ConfigurationParser::Load(std::istream &inputStream) | RowReader 
&operator>>(RowReader &reader, int &rhs)\n", "void ConfigurationParser::Load(std::istream &inputStream) | class Exception : public std::exception {}\n", @@ -323,32 +302,24 @@ "text": [ "_RangeToEnumerable RangeToEnumerable(const std::pair range) | class _RangeToEnumerable {}\n", "template<> struct is_flags : std::true_type {} | enum class StringComparison : int {}\n", - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "bool Equal(const int &lhs, const int &rhs, int comparision) | enum class StringComparison : int {}\n", - "bool Confirm(const std::string &prompt) | bool Confirm(const int &prompt)\n", - "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", - "bool Confirm(const int &prompt) | class RowReader {}\n", "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const std::type_index &type) | namespace std {}\n", "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", - "void ReportException(const std::exception &ex, int level) | void ReportException(const int &ex, int level)\n", - "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", - "int &operator>>(int &reader, int &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, long &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, long &rhs) | int &operator>>(int &reader, int 
&rhs)\n", - "int &operator>>(int &reader, float &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, float &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, double &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, double &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, bool &rhs) | class RowReader {}\n", - "int &operator>>(int &reader, bool &rhs) | int &operator>>(int &reader, int &rhs)\n", - "int &operator>>(int &reader, bool &rhs) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", - "int &operator>>(int &reader, bool &rhs) | enum class StringComparison : int {}\n", - "int &operator>>(int &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", + "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", + "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", + "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", + "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", + "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", + "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", + "int main(int argc, char *argv[]) | double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const\n", + "int main(int argc, char *argv[]) | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", + "class ConfigurationParser {} | namespace std {}\n", "RowReader &operator>>(RowReader &reader, std::string &rhs) | class RowReader {}\n", 
+ "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, int &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", "RowReader &operator>>(RowReader &reader, long &rhs) | class RowReader {}\n", @@ -365,13 +336,8 @@ "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | enum class StringComparison : int {}\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", - "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", - "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", - "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", - "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", - "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", - "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", "void ConfigurationParser::Load(std::istream &inputStream) | class ConfigurationParser {}\n", "void ConfigurationParser::Load(std::istream &inputStream) | RowReader &operator>>(RowReader &reader, int &rhs)\n", "void ConfigurationParser::Load(std::istream &inputStream) | class Exception : public std::exception {}\n", @@ -418,6 +384,86 @@ "source": [ "%tb" ] + }, + 
{ + "cell_type": "markdown", + "metadata": {}, + "source": [ + "----" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)', 0.23606052783278517)\n", + "('bool Confirm(const std::string &prompt)', 0.1098426006632092)\n", + "('void ConfigurationParser::Load(std::istream &inputStream)', 0.07201434847105015)\n", + "('class Exception : public std::exception {}', 0.06966460237711009)\n", + "('class RowReader {}', 0.06380841209403623)\n", + "('enum class StringComparison : int {}', 0.050441798119527724)\n", + "('RowReader &operator>>(RowReader &reader, bool &rhs)', 0.04895870075815647)\n", + "('std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const', 0.03924446648355976)\n", + "('class ConfigurationParser {}', 0.03735805037229169)\n", + "('bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const', 0.03557813887174236)\n", + "('namespace std {}', 0.033826737287587075)\n", + "('class BlockExitHandler {}', 0.03199281435681512)\n", + "('RowReader &operator>>(RowReader &reader, int &rhs)', 0.02457173934894065)\n", + "('RowReader &operator>>(RowReader &reader, std::string &rhs)', 0.023712590188007344)\n", + "('int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const', 0.021328542904543415)\n", + "('ConfigurationParser::ConfigurationParser(std::istream &inputStream)', 0.009694792229337916)\n", + "('ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)', 0.009694792229337916)\n", + "('int main(int argc, char *argv[])', 0.00921005261787102)\n", + "('class ArgumentException : public Exception {}', 0.007755833783470333)\n", + "('void ReportException(const std::exception &ex, int level)', 0.007421220252219749)\n", + "('template<> struct is_flags : std::true_type {}', 
0.007271094172003437)\n", + "('class _RangeToEnumerable {}', 0.0067136448924616805)\n", + "('double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const', 0.006670901038451692)\n", + "('const char *FriendlyNameOf()', 0.004544443409840942)\n", + "('RowReader &operator>>(RowReader &reader, long &rhs)', 0.0033931772802682707)\n", + "('RowReader &operator>>(RowReader &reader, float &rhs)', 0.0033931772802682707)\n", + "('RowReader &operator>>(RowReader &reader, double &rhs)', 0.0033931772802682707)\n", + "('class InvalidCastException : public Exception {}', 0.002423698057334479)\n", + "('inline std::string to_string(const std::pair &value)', 0.0019389584458675833)\n", + "('const char *FriendlyNameOf(const std::type_index &type)', 0.0014542188344006874)\n", + "('const char *FriendlyNameOf(const type_info &type)', 0.0014542188344006874)\n", + "('const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)', 0.0014542188344006874)\n", + "('TEnum operator&(TEnum lhs, TEnum rhs)', 0.0014542188344006874)\n", + "('TEnum operator|(TEnum lhs, TEnum rhs)', 0.0014542188344006874)\n", + "('TStream OpenAndValidate(const TPath arg1)', 0.0006263408888268776)\n", + "('_RangeToEnumerable RangeToEnumerable(const std::pair range)', 0.0004847396114668958)\n", + "('class OperationFailureException : public Exception {}', 0.0004847396114668958)\n", + "('TStream &operator>>(TStream &s, RowReader &reader)', 0.0004847396114668958)\n", + "('_DECLARE_ENUM', 0.0004847396114668958)\n", + "('_DECLARE_ENUM_DEFAULT', 0.0004847396114668958)\n", + "('ANSI_COLOR_RED', 0.0004847396114668958)\n", + "('ANSI_COLOR_GREEN', 0.0004847396114668958)\n", + "('ANSI_COLOR_YELLOW', 0.0004847396114668958)\n", + "('ANSI_COLOR_BLUE', 0.0004847396114668958)\n", + "('ANSI_COLOR_MAGENTA', 0.0004847396114668958)\n", + "('ANSI_COLOR_CYAN', 0.0004847396114668958)\n", + "('ANSI_COLOR_BRIGHT', 0.0004847396114668958)\n", + "('ANSI_COLOR_RESET', 0.0004847396114668958)\n", + "('bool 
dynamic_kind_of(const TSrc *obj)', 0.0004847396114668958)\n", + "('bool pointer_kind_of(const std::shared_ptr obj)', 0.0004847396114668958)\n", + "('TDest safe_cast(TSrc obj)', 0.0004847396114668958)\n", + "('std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)', 0.0004847396114668958)\n", + "('std::string StreamStatusToString(const TStream &stream)', 0.0004847396114668958)\n", + "('void ValidateStream(const TStream &stream)', 0.0004847396114668958)\n", + "('_RE_TRACE', 0.0004847396114668958)\n", + "('_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING', 0.0004847396114668958)\n" + ] + } + ], + "source": [ + "for t in sorted(G.function_devranks(0.85).items(), key=lambda t:t[1], reverse=True):\n", + " print(t)" + ] } ], "metadata": { diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 47ebb4bc2f1..d8fe8e89375 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -1,18 +1,20 @@ import asyncio import logging +import os import subprocess from abc import abstractclassmethod, abstractproperty from datetime import datetime, timedelta from os import path -from pathlib import Path -from typing import List, Union +from pathlib import Path, PurePath +from typing import Dict, List, Tuple, Union from persper.analytics.call_commit_graph import CallCommitGraph from persper.analytics.graph_server import GraphServer from persper.analytics.patch_parser import PatchParser +from .callgraph import CallGraphScope from .callgraph.adapters import CallCommitGraphSynchronizer -from .callgraph.builder import CallGraphBuilder +from .callgraph.builder import CallGraphBuilder, TokenizedDocument from .callgraph.manager import CallGraphManager from .languageclient.lspclient import LspClient @@ -63,6 +65,10 @@ def __init__(self, workspaceRoot: str, self._lastFileWrittenTime: datetime = None self._dumpLogs = dumpLogs self._dumpGraphs = dumpGraphs + self._patchParser = 
PatchParser() + # [(oldPath, newPath, addedLines, removedLines), ...] + # added/removedLines := [[startLine, modifiedLines], ...] + self._stashedPatches:List[Tuple[ PurePath, PurePath, List[Tuple[int, int]], List[Tuple[int, int]] ]] = [] def __getstate__(self): state = self.__dict__.copy() @@ -70,12 +76,14 @@ def __getstate__(self): state.pop("_lspClient", None) state.pop("_callGraphBuilder", None) state.pop("_callGraphManager", None) + state.pop("_patchParser", None) return state def __setstate__(self, state): self.__dict__.update(state) if not self._workspaceRoot.exists(): self._workspaceRoot.touch() + self._patchParser = PatchParser() def register_commit(self, hexsha, author_name, author_email, commit_message): self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) @@ -84,18 +92,96 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, oldPath = self._workspaceRoot.joinpath(old_filename).resolve() if old_filename else None newPath = self._workspaceRoot.joinpath(new_filename).resolve() if new_filename else None assert oldPath or newPath + if newPath is None: + # The file has been deleted + # We need to scan it before it's gone, instead of in end_commit + self._markWholeDocumentAsChanged(await self._callGraphBuilder.getTokenizedDocument(oldPath)) + elif oldPath is None: + # The file has been added + self._stashedPatches.append((oldPath, newPath, None, None)) + else: + added, removed = self._patchParser.parse(patch.decode('utf-8', 'replace')) + # calculate removed lines + if removed: + # we can have removed lines only when we have old file + oldDoc:TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(oldPath) + for start, count in removed: + for i in range(start - 1, start + count - 1): + scope = oldDoc.scopeAt(i, 0) + if scope: + self._safeUpdateNodeHistory(scope.name, 1) + self._stashedPatches.append((oldPath, newPath, added, None)) + + # perform file operations if oldPath and oldPath != newPath: 
+ # The file has been moved/deleted await self._callGraphBuilder.deleteFile(oldPath) self._invalidatedFiles.add(oldPath) if newPath: + # The file has been created/modified await self._callGraphBuilder.modifyFile(newPath, new_src) self._invalidatedFiles.add(newPath) self._lastFileWrittenTime = datetime.now() + def _safeUpdateNodeHistory(self, name:str, changeOfLines:int): + if name not in self._ccgraph.nodes(): + self._ccgraph.add_node(name) + self._ccgraph.update_node_history(name, changeOfLines) + + def _markWholeDocumentAsChanged(self, doc:TokenizedDocument): + parentScopes = [] + # print("_markWholeDocumentAsChanged: ", doc.fileName) + for scope in doc.scopes: + while parentScopes and parentScopes[-1][0].endPos <= scope.startPos: + s, c = parentScopes.pop() + self._safeUpdateNodeHistory(s.name, c) + thisScopeLines = scope.endPos.line - scope.startPos.line + 1 + if parentScopes: + # Subtract LOC from innermost scope to eliminate dups + innermostScope = parentScopes[-1] + s, c = innermostScope + assert s.startPos <= scope.startPos and s.endPos >= scope.endPos, \ + "`scope` should be inside parent scope: {0}. parentScopes: {1}".format(s, parentScopes) + c -= thisScopeLines + # If there are more than 1 scope on the same line, + # we will count in 1 line for each scope + if s.startPos.line == scope.startPos.line: + c += 1 + if s.startPos.line < s.endPos.line == scope.endPos.line: + c += 1 + assert c >= 0, \ + "parentScope's LOC change is negative: {0}. 
parentScopes: {1}".format(s, parentScopes) + innermostScope[1] = c + parentScopes.append([scope, thisScopeLines]) + while parentScopes and parentScopes[-1][0].endPos <= scope.startPos: + s, c = parentScopes.pop() + self._safeUpdateNodeHistory(s.name, c) + async def end_commit(self, hexsha): + # calculate lines of change in functions + # update edges await self.updateGraph() if self._dumpGraphs: self._callGraph.dumpTo("Graph-" + hexsha + ".txt") + + # calculate added lines + for oldPath, newPath, added, _ in self._stashedPatches: + if not newPath: + continue + if oldPath and not added: + continue + newDoc:TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(newPath) + if not oldPath: + # file has been added + self._markWholeDocumentAsChanged(newDoc) + else: + assert added + for start, count in added: + for i in range(start - 1, start + count - 1): + scope = newDoc.scopeAt(i, 0) + if scope: + self._safeUpdateNodeHistory(scope.name, 1) + self._stashedPatches.clear() _logger.info("End commit: %s", hexsha) # ensure the files in the next commit has a different timestamp as this commit. 
if datetime.now() - self._lastFileWrittenTime < timedelta(seconds=1): @@ -131,11 +217,18 @@ async def startLspClient(self): * self._callGraphBuilder * self._callGraphManager """ - self._lspServerProc = subprocess.Popen( - self._languageServerCommand, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - shell=True) + if os.name == "nt": + self._lspServerProc = subprocess.Popen( + self._languageServerCommand, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + creationflags=subprocess.CREATE_NEW_CONSOLE) + else: + self._lspServerProc = subprocess.Popen( + self._languageServerCommand, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + shell=True) async def stopLspClient(self): """ diff --git a/persper/analytics/lsp_graph_server/callgraph/adapters.py b/persper/analytics/lsp_graph_server/callgraph/adapters.py index f45ec69108a..da181b57841 100644 --- a/persper/analytics/lsp_graph_server/callgraph/adapters.py +++ b/persper/analytics/lsp_graph_server/callgraph/adapters.py @@ -1,6 +1,11 @@ +import logging +from pathlib import Path, PurePath +from typing import Iterable + from persper.analytics.call_commit_graph import CallCommitGraph from . import CallGraph, CallGraphBranch +_logger = logging.getLogger(__name__) class CallCommitGraphSynchronizer(CallGraph): @@ -9,15 +14,21 @@ def __init__(self, callCommitGraph: CallCommitGraph): self._callCommitGraph = callCommitGraph def add(self, branch: CallGraphBranch): - super().add(branch) if branch.sourceScope == branch.definitionScope: # e.g. variable referernces. return + if branch.sourceScope is None or branch.definitionScope is None: + _logger.debug("Ignored branch with None scope: %s", branch) + return # Use scope full name as identifier. 
- self._callCommitGraph.add_node(branch.sourceScope.name) - self._callCommitGraph.add_node(branch.definitionScope.name) + if branch.sourceScope.name not in self._callCommitGraph.nodes().data(): + self._callCommitGraph.add_node(branch.sourceScope.name) + if branch.definitionScope.name not in self._callCommitGraph.nodes().data(): + self._callCommitGraph.add_node(branch.definitionScope.name) self._callCommitGraph.add_edge(branch.sourceScope.name, branch.definitionScope.name) + def removeBySourceFiles(self, fileNames: Iterable[PurePath]): + pass + def clear(self): - super().clear() self._callCommitGraph.reset() diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 012c268a781..3c809958387 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -89,8 +89,9 @@ def PopulateSymbols(symbols): _logger.error("Invalid DocumentSymbol in %s: %s", fileName, s) PopulateSymbols(documentSymbols) - # put the scopes in document order of start positions, then by the document order of their end positions - self._scopes.sort(key=lambda sc: (sc.startPos, sc.endPos)) + # put the scopes in document order of start positions, then by the reversed document order of their end positions + # so that we can find the smallest scope by one traverse along the scope list. 
+ self._scopes.sort(key=lambda sc: (sc.startPos.toTuple(), (-sc.endPos.line, -sc.endPos.character))) NOT_EXISTS = object() for t in tokens: t: Token @@ -164,8 +165,8 @@ def scopeAt(self, line: int, character: int) -> CallGraphScope: if scope.startPos > pos: break if pos < scope.endPos: - if lastScope is None or lastScope.startPos <= scope.startPos <= lastScope.endPos: - lastScope = scope + assert lastScope is None or lastScope.startPos <= scope.startPos <= lastScope.endPos + lastScope = scope return lastScope diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py index efc1a5c59b6..b4b51a0296a 100644 --- a/persper/analytics/lsp_graph_server/callgraph/manager.py +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -39,15 +39,14 @@ async def buildGraph(self, fileNames: Union[str, Iterable[str]] = None, globPatt globPattern: `str` or `str[]` containing the glob pattern of the files from which to build the call graph branches. 
""" - oldBranchesCount = len(self._graph.items) - branchCounter = oldBranchesCount + branchCounter = 0 # with dups fileCounter = 0 await self._builder.waitForFileSystem() def pushBranch(branch): nonlocal branchCounter try: self._graph.add(branch) - branchCounter = len(self._graph.items) - oldBranchesCount + branchCounter += 1 if branchCounter % 2000 == 0: _logger.info("Already added %d branches.", branchCounter) except ValueError as ex: @@ -67,9 +66,9 @@ def pushBranch(branch): async for b in self._builder.buildCallGraphInFiles(globPattern): pushBranch(b) if fileNames and not globPattern: - _logger.info("Added %d branches (-> %d) from %d files.", branchCounter, len(self._graph.items), fileCounter) + _logger.info("Added %d branches from %d files.", branchCounter, fileCounter) else: - _logger.info("Added %d branches (-> %d).", len(self._graph.items), branchCounter) + _logger.info("Added %d branches.", branchCounter) def removeByFiles(self, fileNames: Iterable[str]) -> Iterable[Path]: """ diff --git a/persper/analytics/lsp_graph_server/jsonrpcutils.py b/persper/analytics/lsp_graph_server/jsonrpcutils.py index bce06b62123..c611654b64b 100644 --- a/persper/analytics/lsp_graph_server/jsonrpcutils.py +++ b/persper/analytics/lsp_graph_server/jsonrpcutils.py @@ -11,6 +11,8 @@ def __init__(self, fileName): def logTX(self, message: dict): self._file.write("{0} < {1}\n".format(datetime.now(), json.dumps(message))) + if message.get("method", None) == "shutdown": + self._file.flush() def logRX(self, message: dict): self._file.write("{0} > {1}\n".format(datetime.now(), json.dumps(message))) From 7ee6cbfc1473d203f467fcea5339cd1b12657bae Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 22 Jan 2019 23:41:28 +0800 Subject: [PATCH 24/84] Discard all non-function definitions. Separate vertex building from edge building. 
--- .../analytics/lsp_graph_server/__init__.py | 14 +++- .../lsp_graph_server/callgraph/adapters.py | 7 +- .../lsp_graph_server/callgraph/builder.py | 64 ++++++++++++------- persper/analytics/lsp_graph_server/ccls.py | 1 + 4 files changed, 57 insertions(+), 29 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index d8fe8e89375..cd36fb3a13d 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -159,7 +159,7 @@ def _markWholeDocumentAsChanged(self, doc:TokenizedDocument): async def end_commit(self, hexsha): # calculate lines of change in functions - # update edges + # update vetices & edges await self.updateGraph() if self._dumpGraphs: self._callGraph.dumpTo("Graph-" + hexsha + ".txt") @@ -265,5 +265,17 @@ async def updateGraph(self): return affectedFiles = self._callGraphManager.removeByFiles(self._invalidatedFiles) _logger.info("Invalidated %d files, affected %d files.", len(self._invalidatedFiles), len(affectedFiles)) + await self._callGraphBuilder.waitForFileSystem() + # update vertices + # Use scope full name as identifier. 
+ for path in affectedFiles: + path:Path + if not path.exists(): + continue + for scope in await self._callGraphBuilder.enumScopesInFile(str(path)): + scope:CallGraphScope + if scope.name not in self._ccgraph.nodes().data(): + self._ccgraph.add_node(scope.name) + # update edges await self._callGraphManager.buildGraph(fileNames=affectedFiles) self._invalidatedFiles.clear() diff --git a/persper/analytics/lsp_graph_server/callgraph/adapters.py b/persper/analytics/lsp_graph_server/callgraph/adapters.py index da181b57841..b0716c07206 100644 --- a/persper/analytics/lsp_graph_server/callgraph/adapters.py +++ b/persper/analytics/lsp_graph_server/callgraph/adapters.py @@ -20,11 +20,8 @@ def add(self, branch: CallGraphBranch): if branch.sourceScope is None or branch.definitionScope is None: _logger.debug("Ignored branch with None scope: %s", branch) return - # Use scope full name as identifier. - if branch.sourceScope.name not in self._callCommitGraph.nodes().data(): - self._callCommitGraph.add_node(branch.sourceScope.name) - if branch.definitionScope.name not in self._callCommitGraph.nodes().data(): - self._callCommitGraph.add_node(branch.definitionScope.name) + # assuming the referenced edges has already been registered, + # or there will be Error self._callCommitGraph.add_edge(branch.sourceScope.name, branch.definitionScope.name) def removeBySourceFiles(self, fileNames: Iterable[PurePath]): diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index 3c809958387..f790870bdf8 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -31,26 +31,6 @@ } -_SCOPE_SYMBOL_KINDS = { - # SymbolKind.Unknown, - SymbolKind.Class, - SymbolKind.Constructor, - SymbolKind.Enum, - SymbolKind.File, - SymbolKind.Function, - SymbolKind.Interface, - SymbolKind.Macro, - SymbolKind.Method, - SymbolKind.Module, - SymbolKind.Namespace, - 
SymbolKind.Operator, - SymbolKind.Package, - SymbolKind.Property, - SymbolKind.StaticMethod, - SymbolKind.Struct -} - - class TokenizedDocument: """ Represents a fully tokenized document that supports finding a symbol or scope from @@ -58,7 +38,9 @@ class TokenizedDocument: """ def __init__(self, tokens: Iterable[Token], - documentSymbols: Iterable[Union[DocumentSymbol, SymbolInformation]], fileName: PurePath): + documentSymbols: Iterable[Union[DocumentSymbol, SymbolInformation]], + fileName: PurePath, + documentSymbolFilter): self._tokens = [] self._scopes = [] self._fileName = fileName @@ -72,7 +54,12 @@ def __init__(self, tokens: Iterable[Token], def PopulateSymbols(symbols): for s in symbols: - if s.kind not in _SCOPE_SYMBOL_KINDS: + filterResult = documentSymbolFilter(s) + if filterResult == None: + continue + if filterResult == False: + if isinstance(s, SymbolInformation): + PopulateSymbols(s.children) continue if isinstance(s, DocumentSymbol): # We assume selectionRange is exactly the range of symbol name @@ -178,6 +165,7 @@ class CallGraphBuilder(ABC): def __init__(self, lspClient: LspClient): if not isinstance(lspClient, LspClient): raise TypeError("lspClient should be an instance of LspClient.") + # status self._lspClient = lspClient self._tokenizedDocCache: Dict[str, TokenizedDocument] = {} self._workspaceFilePatterns: List[str] = None @@ -256,7 +244,8 @@ def tokenGenerator(): return if self.filterToken(tk): yield tk - doc = TokenizedDocument(tokenGenerator(), documentSymbols, path) + doc = TokenizedDocument(tokenGenerator(), documentSymbols, path, + documentSymbolFilter=lambda s: self.filterSymbol(s)) self._tokenizedDocCache[path] = doc return doc @@ -282,6 +271,26 @@ def filterToken(self, token: Token) -> bool: """ raise NotImplementedError + def filterSymbol(self, symbol: Union[DocumentSymbol, SymbolInformation]) -> bool: + """ + When overridden in the derived class, determines whether the given symbol + should be treated as a target of goto 
definition / scope / call graph vertex. + Returns + True symbol should be included and its children, if available, will pass filterSymbol + False symbol should be excluded, while its children will pass filterSymbol + None symbol and its children will be excluded + """ + return symbol.kind in { + SymbolKind.Constructor, + SymbolKind.Enum, + SymbolKind.Function, + SymbolKind.Macro, + SymbolKind.Method, + SymbolKind.Operator, + SymbolKind.Property, + SymbolKind.StaticMethod, + } + def filterFile(self, fileName: str): if self._workspaceFilePatternsRegex: return any(p.match(str(fileName)) for p in self._workspaceFilePatternsRegex) @@ -386,6 +395,15 @@ async def buildCallGraphInFile(self, fileName: str) -> Iterable[CallGraphBranch] await self.closeDocument(textDoc.uri) _logger.info("Yielded %d branches.", counter) + async def enumScopesInFile(self, fileName: str) -> Iterable[CallGraphScope]: + """ + Enumerate all the "scope"s in the specified file. + Scopes are vertices of the call graph. + """ + srcPath = self.pathFromUri(fileName) + thisDoc: TokenizedDocument = await self.getTokenizedDocument(srcPath) + return thisDoc.scopes + async def deleteFile(self, fileName: str): path = Path(fileName).resolve() self.removeDocumentCache(path) diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index b0c945904f9..29a41e8f315 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -161,6 +161,7 @@ def __init__(self, workspaceRoot: str, cacheRoot: str = None, dumpGraphs: bool = False): super().__init__(workspaceRoot, languageServerCommand=languageServerCommand, dumpLogs=dumpLogs, dumpGraphs=dumpGraphs) self._cacheRoot = Path(cacheRoot).resolve() if cacheRoot else self._workspaceRoot.joinpath(".ccls-cache") + self._c_requireScopeDefinitionMatch = True async def startLspClient(self): await super().startLspClient() From f7e8379f792f2b8654d0b6509f0c0b9d983abf8e Mon Sep 17 00:00:00 2001 
From: xinyan Date: Sat, 26 Jan 2019 12:00:05 +0800 Subject: [PATCH 25/84] Draft for analyzer testing. Make Analyzer observable, so we will be able to probe into it during testing. Still having trouble on graph comparison. is_isomorphic matches isolated nodes randomly, rather than by their id. Eliminate deprecation warning of re in wildcards.py. --- persper/analytics/analyzer.py | 39 +++++- .../analytics/lsp_graph_server/wildcards.py | 4 +- test/test_analytics/test_analyzer_lsp_ccls.py | 117 ++++++++++++++++++ 3 files changed, 157 insertions(+), 3 deletions(-) create mode 100644 test/test_analytics/test_analyzer_lsp_ccls.py diff --git a/persper/analytics/analyzer.py b/persper/analytics/analyzer.py index fed7dde2175..2ed1860a8f2 100644 --- a/persper/analytics/analyzer.py +++ b/persper/analytics/analyzer.py @@ -4,7 +4,8 @@ import asyncio from persper.analytics.git_tools import get_contents, _diff_with_first_parent from persper.analytics.iterator import RepoIterator - +from abc import ABC +from git import Commit def print_overview(commits, branch_commits): print('----- Overview ------') @@ -62,6 +63,15 @@ def __init__(self, repo_path, graph_server): self._graph_server = graph_server self._ri = RepoIterator(repo_path) self._ccgraph = None + self._observer:AnalyzerObserver = emptyAnalyzerObserver + + @property + def observer(self): + return self._observer + + @observer.setter + def observer(self, value): + self._observer = value or emptyAnalyzerObserver async def analyze(self, rev=None, from_beginning=False, @@ -94,13 +104,17 @@ async def analyze(self, rev=None, for idx, commit in enumerate(reversed(commits), 1): phase = 'main' print_commit_info(phase, idx, commit, start_time, verbose) + self._observer.onBeforeCommit(self, idx, commit, True) await self.analyze_master_commit(commit) + self._observer.onAfterCommit(self, idx, commit, True) self.autosave(phase, idx, checkpoint_interval) for idx, commit in enumerate(branch_commits, 1): phase = 'branch' 
print_commit_info(phase, idx, commit, start_time, verbose) + self._observer.onBeforeCommit(self, idx, commit, False) await self.analyze_branch_commit(commit) + self._observer.onAfterCommit(self, idx, commit, False) self.autosave(phase, idx, checkpoint_interval) self.autosave('finished', 0, 1) @@ -168,3 +182,26 @@ def autosave(self, phase, idx, checkpoint_interval): repo_name = os.path.basename(self._ri.repo_path.rstrip('/')) fname = repo_name + '-' + phase + '-' + str(idx) + '.pickle' self.save(fname) + + def __getstate__(self): + state = self.__dict__.copy() + state.pop("_observer", None) + return state + + def __setstate__(self, state): + self.__dict__.update(state) + +class AnalyzerObserver(ABC): + def __init__(self): + pass + + def onBeforeCommit(self, analyzer:Analyzer, index:int, commit:Commit, isMaster:bool): + pass + + def onAfterCommit(self, analyzer:Analyzer, index:int, commit:Commit, isMaster:bool): + pass + +class _EmptyAnalyzerObserverType(AnalyzerObserver): + pass + +emptyAnalyzerObserver = _EmptyAnalyzerObserverType() diff --git a/persper/analytics/lsp_graph_server/wildcards.py b/persper/analytics/lsp_graph_server/wildcards.py index 542d0a76d67..7c0261c3a8e 100644 --- a/persper/analytics/lsp_graph_server/wildcards.py +++ b/persper/analytics/lsp_graph_server/wildcards.py @@ -21,7 +21,7 @@ def translate(pat): if i < n and pat[i] == os.sep: i = i + 1 else: - res = res + '[^\/]+' + res = res + r'[^\\/]+' elif c == '?': res = res + '.' 
elif c == '[': @@ -44,4 +44,4 @@ def translate(pat): res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) - return res + '$(?ms)' + return '(?ms)' + res + '$' diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py new file mode 100644 index 00000000000..4c22a32648f --- /dev/null +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -0,0 +1,117 @@ +import json +import logging +import os +import subprocess +from pathlib import Path +from tempfile import mkdtemp + +import networkx.readwrite.json_graph +import pytest +from git import Commit +from networkx import Graph +from networkx.algorithms.isomorphism import is_isomorphic + +from persper.analytics.analyzer import Analyzer, AnalyzerObserver +from persper.analytics.call_commit_graph import CallCommitGraph +from persper.analytics.lsp_graph_server.ccls import CclsGraphServer +from persper.util.path import root_path + +_logger = logging.getLogger() + + +def commitGraphEquals(g1: Graph, g2: Graph): + def nodeComparer(n1: dict, n2: dict): + if n1 == n2: + return True + _logger.warn(str.format("Node mismatch: n1 = {0}, n2 = {1}", n1, n2)) + return False + + def edgeComparer(e1: dict, e2: dict): + if e1 == e2: + return True + _logger.warn(str.format("Edge mismatch: e1 = {0}, e2 = {1}", e1, e2)) + return False + return is_isomorphic(g1, g2, nodeComparer, edgeComparer) + + +async def createFeatureBranchAnalyzer(): + # build the repo first if not exists yet + repo_path = os.path.join(root_path, 'repos/test_feature_branch') + script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') + test_src_path = os.path.join(root_path, 'test/test_feature_branch') + if not os.path.isdir(repo_path): + cmd = '{} {}'.format(script_path, test_src_path) + subprocess.call(cmd, shell=True) + + # create workspace root folder + CCLS_COMMAND = os.path.join(root_path, "bin/ccls") + DUMP_LOGS = False + workspaceRoot = mkdtemp() + print("Workspace root: ", workspaceRoot) + 
graphServer = CclsGraphServer(workspaceRoot, cacheRoot="./.ccls-cache", + languageServerCommand=CCLS_COMMAND + + (" -log-file=ccls.log" if DUMP_LOGS else ""), + dumpLogs=DUMP_LOGS) + print(repo_path) + analyzer = Analyzer(repo_path, graphServer) + graphServer.reset_graph() + return graphServer, analyzer + + +class TestAnalyzerObserver(AnalyzerObserver): + def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = None, dumpOnlyOnError: bool = True): + super().__init__() + if graphBaselineDumpPath: + self._baselinePath = Path(graphBaselineDumpPath).resolve() + self._baselinePath.mkdir(parents=True, exist_ok=True) + else: + self._baselinePath = None + if graphTestDumpPath: + self._dumpPath = Path(graphTestDumpPath).resolve() + self._dumpPath.mkdir(parents=True, exist_ok=True) + else: + self._dumpPath = None + self._dumpOnlyOnError = dumpOnlyOnError + + def onAfterCommit(self, analyzer: Analyzer, index: int, commit: Commit, isMaster: bool): + graph: CallCommitGraph = analyzer.get_graph() + + def dumpGraph(warnIfNotAvailable: bool): + if not self._dumpPath: + if warnIfNotAvailable: + _logger.warning( + "Cannot dump call commit graph because no dump path has been specified. Commit %s: %s.", commit.hexsha, commit.message) + return False + data = networkx.readwrite.json_graph.node_link_data(graph._digraph) + graphPath = self._dumpPath.joinpath( + commit.message.strip() + ".g.json") + with open(graphPath, "wt") as f: + json.dump(data, f, sort_keys=True, indent=4) + return True + # check baseline for regression + if self._baselinePath: + try: + graphPath = self._baselinePath.joinpath( + commit.message.strip() + ".g.json") + data = None + with open(graphPath, "rt") as f: + data = json.load(f) + baseline = networkx.readwrite.json_graph.node_link_graph(data) + assert commitGraphEquals(baseline, graph._digraph), str.format( + "Graph not equvalent. 
Commit: {0}: {1}.", commit.hexsha, commit.message) + except: + dumpGraph(True) + raise + if not self._dumpOnlyOnError: + dumpGraph(False) + + +@pytest.mark.asyncio +async def testFeatureBranch(): + graphServer, analyzer = await createFeatureBranchAnalyzer() + graphServer: CclsGraphServer + analyzer: Analyzer + async with graphServer: + analyzer.observer = TestAnalyzerObserver( + "./feature_branch", "./feature_branch/test") + await analyzer.analyze(from_beginning=True) From 2f091bd9c755df75cd308b89f8181566b9a5f3bd Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 27 Jan 2019 00:37:38 +0800 Subject: [PATCH 26/84] Use homemade graph comparison in test_analyzer. Fixed the bug causing the modification history for the last functions to be neglected in the created/deleted files. --- .../analytics/lsp_graph_server/__init__.py | 3 +- test/test_analytics/test_analyzer_lsp_ccls.py | 59 +++++++++++++++---- 2 files changed, 51 insertions(+), 11 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index cd36fb3a13d..9bc42c5a05f 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -133,6 +133,7 @@ def _markWholeDocumentAsChanged(self, doc:TokenizedDocument): # print("_markWholeDocumentAsChanged: ", doc.fileName) for scope in doc.scopes: while parentScopes and parentScopes[-1][0].endPos <= scope.startPos: + # scope is out of parentScope, then the changed line count for parentScope is decided s, c = parentScopes.pop() self._safeUpdateNodeHistory(s.name, c) thisScopeLines = scope.endPos.line - scope.startPos.line + 1 @@ -153,7 +154,7 @@ def _markWholeDocumentAsChanged(self, doc:TokenizedDocument): "parentScope's LOC change is negative: {0}. 
parentScopes: {1}".format(s, parentScopes) innermostScope[1] = c parentScopes.append([scope, thisScopeLines]) - while parentScopes and parentScopes[-1][0].endPos <= scope.startPos: + while parentScopes: s, c = parentScopes.pop() self._safeUpdateNodeHistory(s.name, c) diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index 4c22a32648f..8e5f27792ec 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -19,6 +19,46 @@ _logger = logging.getLogger() +def formatEdgeId(u: str, v: str): + return u + "|->|" + v + + +def graphToDict(ccg: CallCommitGraph): + result = { + "nodes": dict(ccg.nodes(data=True)), + "edges": dict(((formatEdgeId(u, v), data) for (u, v, data) in ccg.edges(data=True))) + } + return result + +def fixGraphDict(graphData: dict): + if "nodes" in graphData: + for id, attr in graphData["nodes"].items(): + if "history" in attr: + attr["history"] = dict((int(k), v) for k, v in attr["history"].items()) + return graphData + +def assertGraphMatches(baseline: dict, ccg: CallCommitGraph): + baselineNodeIds = set(baseline["nodes"].keys()) + for id, attr in ccg.nodes(data=True): + baselineAttr = baseline["nodes"].get(id, None) + assert baselineAttr != None, str.format("Extra node: {0}.", id) + assert baselineAttr == attr, str.format( + "Node attribute mismatch: {0}. Baseline: {1}; Test: {2}.", id, baselineAttr, attr) + baselineNodeIds.remove(id) + assert not baselineNodeIds, str.format( + "Node(s) missing: %s.", baselineNodeIds) + baselineEdgeIds = set(baseline["edges"].keys()) + for u, v, attr in ccg.edges(data=True): + id = formatEdgeId(u, v) + baselineAttr = baseline["edges"].get(id, None) + assert baselineAttr != None, str.format("Extra branch: {0}.", id) + assert baselineAttr == attr, str.format( + "Branch attribute mismatch: {0}. 
Baseline: {1}; Test: {2}.", id, baselineAttr, attr) + baselineEdgeIds.remove(id) + assert not baselineEdgeIds, str.format( + "Branch(es) missing: {0}.", baselineEdgeIds) + + def commitGraphEquals(g1: Graph, g2: Graph): def nodeComparer(n1: dict, n2: dict): if n1 == n2: @@ -59,11 +99,10 @@ async def createFeatureBranchAnalyzer(): class TestAnalyzerObserver(AnalyzerObserver): - def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = None, dumpOnlyOnError: bool = True): + def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = None, dumpOnlyOnError: bool = None): super().__init__() if graphBaselineDumpPath: self._baselinePath = Path(graphBaselineDumpPath).resolve() - self._baselinePath.mkdir(parents=True, exist_ok=True) else: self._baselinePath = None if graphTestDumpPath: @@ -71,7 +110,7 @@ def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = N self._dumpPath.mkdir(parents=True, exist_ok=True) else: self._dumpPath = None - self._dumpOnlyOnError = dumpOnlyOnError + self._dumpOnlyOnError = graphBaselineDumpPath != None if dumpOnlyOnError == None else dumpOnlyOnError def onAfterCommit(self, analyzer: Analyzer, index: int, commit: Commit, isMaster: bool): graph: CallCommitGraph = analyzer.get_graph() @@ -82,7 +121,7 @@ def dumpGraph(warnIfNotAvailable: bool): _logger.warning( "Cannot dump call commit graph because no dump path has been specified. 
Commit %s: %s.", commit.hexsha, commit.message) return False - data = networkx.readwrite.json_graph.node_link_data(graph._digraph) + data = graphToDict(graph) graphPath = self._dumpPath.joinpath( commit.message.strip() + ".g.json") with open(graphPath, "wt") as f: @@ -93,13 +132,13 @@ def dumpGraph(warnIfNotAvailable: bool): try: graphPath = self._baselinePath.joinpath( commit.message.strip() + ".g.json") - data = None + baselineData: dict = None with open(graphPath, "rt") as f: - data = json.load(f) - baseline = networkx.readwrite.json_graph.node_link_graph(data) - assert commitGraphEquals(baseline, graph._digraph), str.format( - "Graph not equvalent. Commit: {0}: {1}.", commit.hexsha, commit.message) + baselineData = fixGraphDict(json.load(f)) + assertGraphMatches(baselineData, graph) except: + _logger.error("Failed on commit %s: %s.", + commit.hexsha, commit.message) dumpGraph(True) raise if not self._dumpOnlyOnError: @@ -113,5 +152,5 @@ async def testFeatureBranch(): analyzer: Analyzer async with graphServer: analyzer.observer = TestAnalyzerObserver( - "./feature_branch", "./feature_branch/test") + "./baseline/feature_branch", "./testdump/feature_branch") await analyzer.analyze(from_beginning=True) From e3233258236a4e2610fc252fa5280c3aeedd1ec8 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 27 Jan 2019 15:50:19 +0800 Subject: [PATCH 27/84] Fixed node history counting problem. Rewrite a unified diff patching parser. 
--- persper/analytics/another_patch_parser.py | 65 +++++++++++++++++++ .../analytics/lsp_graph_server/__init__.py | 16 ++--- test/test_analytics/test_analyzer_lsp_ccls.py | 14 ++-- 3 files changed, 82 insertions(+), 13 deletions(-) create mode 100644 persper/analytics/another_patch_parser.py diff --git a/persper/analytics/another_patch_parser.py b/persper/analytics/another_patch_parser.py new file mode 100644 index 00000000000..cb735c164d3 --- /dev/null +++ b/persper/analytics/another_patch_parser.py @@ -0,0 +1,65 @@ +import re + +_hunkHeader = re.compile( + r"^@@\s*\-(?P<LN>\d+),\s*\d+\s+\+(?P<RN>\d+),\s*\d+\s*@@") + + +def parseUnifiedDiff(diffContent: str, lineNumberOffset: int = 0): + """ + Parse unified diff content, and return the ranges of added and removed lines. + Returns + (addedRanges, removedRanges), where + addedRanges: a list of line ranges [[startLine1, endLine1], ...] added into the new file, + using the 1-based line numbers in the new file. + removedRanges: a list of line ranges [[startLine1, endLine1], ...] removed from the old file, + using the 1-based line numbers in the old file. 
+ """ + leftLine: int = None + rightLine: int = None + addedRanges = [] + removedRanges = [] + lastAddedRange: list = None + lastRemovedRange: list = None + isInPrologue = True + for diffLine in diffContent.rstrip("\r\n\v").split("\n"): + if diffLine.startswith("@@"): + match = _hunkHeader.search(diffLine) + if not match: + if isInPrologue: + continue + raise ValueError(str.format( + "Invalid diff line: {0}.", diffLine)) + leftLine = int(match.group("LN")) + lineNumberOffset + rightLine = int(match.group("RN")) + lineNumberOffset + lastAddedRange = lastRemovedRange = None + isInPrologue = False + elif diffLine.startswith(" "): + assert leftLine != None and rightLine != None + leftLine += 1 + rightLine += 1 + lastAddedRange = lastRemovedRange = None + elif diffLine.startswith("-"): + assert leftLine != None and rightLine != None + if lastRemovedRange: + lastRemovedRange[1] = leftLine + else: + lastRemovedRange = [leftLine, leftLine] + removedRanges.append(lastRemovedRange) + leftLine += 1 + elif diffLine.startswith("+"): + assert leftLine != None and rightLine != None + if lastAddedRange: + lastAddedRange[1] = rightLine + else: + lastAddedRange = [rightLine, rightLine] + addedRanges.append(lastAddedRange) + rightLine += 1 + elif diffLine.startswith("\\"): + # \ No newline at end of file + # Do nothing. We ignore blank lines. 
+ pass + else: + if isInPrologue: + continue + raise ValueError(str.format("Invalid diff line: {0}.", diffLine)) + return addedRanges, removedRanges diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 9bc42c5a05f..6d4745559d5 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -10,7 +10,7 @@ from persper.analytics.call_commit_graph import CallCommitGraph from persper.analytics.graph_server import GraphServer -from persper.analytics.patch_parser import PatchParser +from persper.analytics.another_patch_parser import parseUnifiedDiff from .callgraph import CallGraphScope from .callgraph.adapters import CallCommitGraphSynchronizer @@ -65,7 +65,6 @@ def __init__(self, workspaceRoot: str, self._lastFileWrittenTime: datetime = None self._dumpLogs = dumpLogs self._dumpGraphs = dumpGraphs - self._patchParser = PatchParser() # [(oldPath, newPath, addedLines, removedLines), ...] # added/removedLines := [[startLine, modifiedLines], ...] 
self._stashedPatches:List[Tuple[ PurePath, PurePath, List[Tuple[int, int]], List[Tuple[int, int]] ]] = [] @@ -76,14 +75,12 @@ def __getstate__(self): state.pop("_lspClient", None) state.pop("_callGraphBuilder", None) state.pop("_callGraphManager", None) - state.pop("_patchParser", None) return state def __setstate__(self, state): self.__dict__.update(state) if not self._workspaceRoot.exists(): self._workspaceRoot.touch() - self._patchParser = PatchParser() def register_commit(self, hexsha, author_name, author_email, commit_message): self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) @@ -100,13 +97,14 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, # The file has been added self._stashedPatches.append((oldPath, newPath, None, None)) else: - added, removed = self._patchParser.parse(patch.decode('utf-8', 'replace')) + added, removed = parseUnifiedDiff(patch.decode('utf-8', 'replace')) # calculate removed lines if removed: # we can have removed lines only when we have old file oldDoc:TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(oldPath) - for start, count in removed: - for i in range(start - 1, start + count - 1): + # start, end are inclusive, 1-based + for start, end in removed: + for i in range(start - 1, end): scope = oldDoc.scopeAt(i, 0) if scope: self._safeUpdateNodeHistory(scope.name, 1) @@ -177,8 +175,8 @@ async def end_commit(self, hexsha): self._markWholeDocumentAsChanged(newDoc) else: assert added - for start, count in added: - for i in range(start - 1, start + count - 1): + for start, end in added: + for i in range(start - 1, end): scope = newDoc.scopeAt(i, 0) if scope: self._safeUpdateNodeHistory(scope.name, 1) diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index 8e5f27792ec..f5a0ba54c54 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -18,6 +18,8 @@ 
_logger = logging.getLogger() +testDataRoot = os.path.dirname(os.path.abspath(__file__)) + def formatEdgeId(u: str, v: str): return u + "|->|" + v @@ -30,20 +32,23 @@ def graphToDict(ccg: CallCommitGraph): } return result + def fixGraphDict(graphData: dict): if "nodes" in graphData: for id, attr in graphData["nodes"].items(): if "history" in attr: - attr["history"] = dict((int(k), v) for k, v in attr["history"].items()) + attr["history"] = dict((int(k), v) + for k, v in attr["history"].items()) return graphData + def assertGraphMatches(baseline: dict, ccg: CallCommitGraph): baselineNodeIds = set(baseline["nodes"].keys()) for id, attr in ccg.nodes(data=True): baselineAttr = baseline["nodes"].get(id, None) assert baselineAttr != None, str.format("Extra node: {0}.", id) assert baselineAttr == attr, str.format( - "Node attribute mismatch: {0}. Baseline: {1}; Test: {2}.", id, baselineAttr, attr) + "Node attribute mismatch: {0}. Baseline: {1}; Actual: {2}.", id, baselineAttr, attr) baselineNodeIds.remove(id) assert not baselineNodeIds, str.format( "Node(s) missing: %s.", baselineNodeIds) @@ -53,7 +58,7 @@ def assertGraphMatches(baseline: dict, ccg: CallCommitGraph): baselineAttr = baseline["edges"].get(id, None) assert baselineAttr != None, str.format("Extra branch: {0}.", id) assert baselineAttr == attr, str.format( - "Branch attribute mismatch: {0}. Baseline: {1}; Test: {2}.", id, baselineAttr, attr) + "Branch attribute mismatch: {0}. 
Baseline: {1}; Actual: {2}.", id, baselineAttr, attr) baselineEdgeIds.remove(id) assert not baselineEdgeIds, str.format( "Branch(es) missing: {0}.", baselineEdgeIds) @@ -152,5 +157,6 @@ async def testFeatureBranch(): analyzer: Analyzer async with graphServer: analyzer.observer = TestAnalyzerObserver( - "./baseline/feature_branch", "./testdump/feature_branch") + os.path.join(testDataRoot, "baseline/feature_branch"), + os.path.join(testDataRoot, "actualdump/feature_branch")) await analyzer.analyze(from_beginning=True) From 404e9155ddae57afcc08d00a9d6decee20c37fa7 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 27 Jan 2019 15:51:01 +0800 Subject: [PATCH 28/84] Save graph baseline for test_analyzer_lsp_ccls.py . --- .../baseline/feature_branch/A.g.json | 17 ++++ .../baseline/feature_branch/B.g.json | 30 +++++++ .../baseline/feature_branch/C.g.json | 47 ++++++++++ .../baseline/feature_branch/D.g.json | 65 ++++++++++++++ .../baseline/feature_branch/E.g.json | 77 ++++++++++++++++ .../baseline/feature_branch/F.g.json | 89 ++++++++++++++++++ .../baseline/feature_branch/K.g.json | 90 +++++++++++++++++++ 7 files changed, 415 insertions(+) create mode 100644 test/test_analytics/baseline/feature_branch/A.g.json create mode 100644 test/test_analytics/baseline/feature_branch/B.g.json create mode 100644 test/test_analytics/baseline/feature_branch/C.g.json create mode 100644 test/test_analytics/baseline/feature_branch/D.g.json create mode 100644 test/test_analytics/baseline/feature_branch/E.g.json create mode 100644 test/test_analytics/baseline/feature_branch/F.g.json create mode 100644 test/test_analytics/baseline/feature_branch/K.g.json diff --git a/test/test_analytics/baseline/feature_branch/A.g.json b/test/test_analytics/baseline/feature_branch/A.g.json new file mode 100644 index 00000000000..237d19032ed --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/A.g.json @@ -0,0 +1,17 @@ +{ + "edges": {}, + "nodes": { + "char *str_append(char *string, char *append)": { + 
"history": { + "0": 7 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/B.g.json b/test/test_analytics/baseline/feature_branch/B.g.json new file mode 100644 index 00000000000..3d23e7c2929 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/B.g.json @@ -0,0 +1,30 @@ +{ + "edges": {}, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "0": 7, + "1": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "1": 3 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "1": 11 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/C.g.json b/test/test_analytics/baseline/feature_branch/C.g.json new file mode 100644 index 00000000000..919e12d7ef3 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/C.g.json @@ -0,0 +1,47 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": 2, + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": 2, + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "0": 7, + "1": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "1": 3, + "2": 34 + }, + "size": null + }, + "int str_equals()": { + "history": { + "2": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "1": 11, + "2": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git 
a/test/test_analytics/baseline/feature_branch/D.g.json b/test/test_analytics/baseline/feature_branch/D.g.json new file mode 100644 index 00000000000..036411b985d --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/D.g.json @@ -0,0 +1,65 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": 2, + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": 2, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": 3, + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "0": 7, + "1": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "1": 3, + "2": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "3": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "2": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "1": 11, + "2": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/E.g.json b/test/test_analytics/baseline/feature_branch/E.g.json new file mode 100644 index 00000000000..a92bf26d34a --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/E.g.json @@ -0,0 +1,77 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": 2, + "weight": null + }, + "char 
*str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": 2, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": 3, + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "0": 7, + "1": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "1": 3, + "2": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "3": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "2": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "1": 11, + "2": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "4": 11 + }, + "size": null + }, + "void append(int num)": { + "history": { + "4": 29 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/F.g.json b/test/test_analytics/baseline/feature_branch/F.g.json new file mode 100644 index 00000000000..1cf97a4b913 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/F.g.json @@ -0,0 +1,89 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": 2, + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": 2, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": 3, 
+ "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": 3, + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "0": 7, + "1": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "1": 3, + "2": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "3": 26 + }, + "size": null + }, + "int count()": { + "history": { + "5": 12 + }, + "size": null + }, + "int str_equals()": { + "history": { + "2": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "1": 11, + "2": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "4": 11 + }, + "size": null + }, + "void append(int num)": { + "history": { + "4": 29 + }, + "size": null + }, + "void display(struct node *r)": { + "history": { + "5": 14 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/K.g.json b/test/test_analytics/baseline/feature_branch/K.g.json new file mode 100644 index 00000000000..b0061dbff9f --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/K.g.json @@ -0,0 +1,90 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": 2, + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": 2, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int 
str_equals()": { + "addedBy": 3, + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": 3, + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "0": 7, + "1": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "1": 3, + "2": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "3": 26 + }, + "size": null + }, + "int count()": { + "history": { + "5": 12 + }, + "size": null + }, + "int str_equals()": { + "history": { + "2": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "1": 11, + "2": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "0": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "4": 11 + }, + "size": null + }, + "void append(int num)": { + "history": { + "4": 29 + }, + "size": null + }, + "void display(struct node *r)": { + "history": { + "5": 14, + "6": 5 + }, + "size": null + } + } +} \ No newline at end of file From e356690f9e52ac958110f70eadba25e94cd220e5 Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 28 Jan 2019 23:29:51 +0800 Subject: [PATCH 29/84] Extract common test utility methods. Add docs. 
--- Pipfile | 1 + persper/analytics/analyzer.py | 31 ++++ test/test_analytics/test_analyzer_lsp_ccls.py | 118 +--------------- test/test_analytics/utility/__init__.py | 0 test/test_analytics/utility/graph_baseline.py | 133 ++++++++++++++++++ 5 files changed, 172 insertions(+), 111 deletions(-) create mode 100644 test/test_analytics/utility/__init__.py create mode 100644 test/test_analytics/utility/graph_baseline.py diff --git a/Pipfile b/Pipfile index a89dc5c62fe..a232e639c21 100644 --- a/Pipfile +++ b/Pipfile @@ -19,6 +19,7 @@ ipykernel = "*" matplotlib = "*" antlr4-python3-runtime = "*" python-jsonrpc-server = "*" +pytest-asyncio = "*" [dev-packages] diff --git a/persper/analytics/analyzer.py b/persper/analytics/analyzer.py index 2ed1860a8f2..d39e2b543aa 100644 --- a/persper/analytics/analyzer.py +++ b/persper/analytics/analyzer.py @@ -67,6 +67,9 @@ def __init__(self, repo_path, graph_server): @property def observer(self): + """ + The AnalyzerObserver used to observe current Analyzer. + """ return self._observer @observer.setter @@ -192,16 +195,44 @@ def __setstate__(self, state): self.__dict__.update(state) class AnalyzerObserver(ABC): + """ + Used to observe the progress of `Analyzer` during its analysis of the target repository. + You need to derive your own observer class from it before assigning your observer instance + to `Analyzer.observer`. + """ def __init__(self): pass def onBeforeCommit(self, analyzer:Analyzer, index:int, commit:Commit, isMaster:bool): + """ + Called before the observed Analyzer is about to analyze a commit. + Params: + analyzer: the observed Analyzer instance. + index: the index of the commit, depending on the behavior of the analyzer. + This is usually a series of 1-based ordinal index for master commits, + and another series of 1-based ordinal index for branch commits. + commit: the commit to be analyzed. + isMaster: whether the current commit is one of the master commits. 
+ """ pass def onAfterCommit(self, analyzer:Analyzer, index:int, commit:Commit, isMaster:bool): + """ + Called after the observed Analyzer has finished analyzing a commit. + Params: + analyzer: the observed Analyzer instance. + index: the index of the commit, depending on the behavior of the analyzer. + This is usually a series of 1-based ordinal index for master commits, + and another series of 1-based ordinal index for branch commits. + commit: the commit that has just been analyzed. + isMaster: whether the current commit is one of the master commits. + """ pass class _EmptyAnalyzerObserverType(AnalyzerObserver): pass emptyAnalyzerObserver = _EmptyAnalyzerObserverType() +""" +An AnalyzerObserver instance that does nothing in their notification methods. +""" diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index f5a0ba54c54..cfa2ef46a51 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -4,6 +4,7 @@ import subprocess from pathlib import Path from tempfile import mkdtemp +from .utility.graph_baseline import GraphDumpAnalyzerObserver import networkx.readwrite.json_graph import pytest @@ -11,7 +12,7 @@ from networkx import Graph from networkx.algorithms.isomorphism import is_isomorphic -from persper.analytics.analyzer import Analyzer, AnalyzerObserver +from persper.analytics.analyzer import Analyzer from persper.analytics.call_commit_graph import CallCommitGraph from persper.analytics.lsp_graph_server.ccls import CclsGraphServer from persper.util.path import root_path @@ -21,69 +22,11 @@ testDataRoot = os.path.dirname(os.path.abspath(__file__)) -def formatEdgeId(u: str, v: str): - return u + "|->|" + v - - -def graphToDict(ccg: CallCommitGraph): - result = { - "nodes": dict(ccg.nodes(data=True)), - "edges": dict(((formatEdgeId(u, v), data) for (u, v, data) in ccg.edges(data=True))) - } - return result - - -def fixGraphDict(graphData: dict): - if 
"nodes" in graphData: - for id, attr in graphData["nodes"].items(): - if "history" in attr: - attr["history"] = dict((int(k), v) - for k, v in attr["history"].items()) - return graphData - - -def assertGraphMatches(baseline: dict, ccg: CallCommitGraph): - baselineNodeIds = set(baseline["nodes"].keys()) - for id, attr in ccg.nodes(data=True): - baselineAttr = baseline["nodes"].get(id, None) - assert baselineAttr != None, str.format("Extra node: {0}.", id) - assert baselineAttr == attr, str.format( - "Node attribute mismatch: {0}. Baseline: {1}; Actual: {2}.", id, baselineAttr, attr) - baselineNodeIds.remove(id) - assert not baselineNodeIds, str.format( - "Node(s) missing: %s.", baselineNodeIds) - baselineEdgeIds = set(baseline["edges"].keys()) - for u, v, attr in ccg.edges(data=True): - id = formatEdgeId(u, v) - baselineAttr = baseline["edges"].get(id, None) - assert baselineAttr != None, str.format("Extra branch: {0}.", id) - assert baselineAttr == attr, str.format( - "Branch attribute mismatch: {0}. 
Baseline: {1}; Actual: {2}.", id, baselineAttr, attr) - baselineEdgeIds.remove(id) - assert not baselineEdgeIds, str.format( - "Branch(es) missing: {0}.", baselineEdgeIds) - - -def commitGraphEquals(g1: Graph, g2: Graph): - def nodeComparer(n1: dict, n2: dict): - if n1 == n2: - return True - _logger.warn(str.format("Node mismatch: n1 = {0}, n2 = {1}", n1, n2)) - return False - - def edgeComparer(e1: dict, e2: dict): - if e1 == e2: - return True - _logger.warn(str.format("Edge mismatch: e1 = {0}, e2 = {1}", e1, e2)) - return False - return is_isomorphic(g1, g2, nodeComparer, edgeComparer) - - -async def createFeatureBranchAnalyzer(): +async def createFeatureBranchAnalyzer(repoName: str): # build the repo first if not exists yet - repo_path = os.path.join(root_path, 'repos/test_feature_branch') + repo_path = os.path.join(root_path, 'repos/' + repoName) script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/test_feature_branch') + test_src_path = os.path.join(root_path, 'test/' + repoName) if not os.path.isdir(repo_path): cmd = '{} {}'.format(script_path, test_src_path) subprocess.call(cmd, shell=True) @@ -103,60 +46,13 @@ async def createFeatureBranchAnalyzer(): return graphServer, analyzer -class TestAnalyzerObserver(AnalyzerObserver): - def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = None, dumpOnlyOnError: bool = None): - super().__init__() - if graphBaselineDumpPath: - self._baselinePath = Path(graphBaselineDumpPath).resolve() - else: - self._baselinePath = None - if graphTestDumpPath: - self._dumpPath = Path(graphTestDumpPath).resolve() - self._dumpPath.mkdir(parents=True, exist_ok=True) - else: - self._dumpPath = None - self._dumpOnlyOnError = graphBaselineDumpPath != None if dumpOnlyOnError == None else dumpOnlyOnError - - def onAfterCommit(self, analyzer: Analyzer, index: int, commit: Commit, isMaster: bool): - graph: CallCommitGraph = analyzer.get_graph() - - 
def dumpGraph(warnIfNotAvailable: bool): - if not self._dumpPath: - if warnIfNotAvailable: - _logger.warning( - "Cannot dump call commit graph because no dump path has been specified. Commit %s: %s.", commit.hexsha, commit.message) - return False - data = graphToDict(graph) - graphPath = self._dumpPath.joinpath( - commit.message.strip() + ".g.json") - with open(graphPath, "wt") as f: - json.dump(data, f, sort_keys=True, indent=4) - return True - # check baseline for regression - if self._baselinePath: - try: - graphPath = self._baselinePath.joinpath( - commit.message.strip() + ".g.json") - baselineData: dict = None - with open(graphPath, "rt") as f: - baselineData = fixGraphDict(json.load(f)) - assertGraphMatches(baselineData, graph) - except: - _logger.error("Failed on commit %s: %s.", - commit.hexsha, commit.message) - dumpGraph(True) - raise - if not self._dumpOnlyOnError: - dumpGraph(False) - - @pytest.mark.asyncio async def testFeatureBranch(): - graphServer, analyzer = await createFeatureBranchAnalyzer() + graphServer, analyzer = await createFeatureBranchAnalyzer("test_feature_branch") graphServer: CclsGraphServer analyzer: Analyzer async with graphServer: - analyzer.observer = TestAnalyzerObserver( + analyzer.observer = GraphDumpAnalyzerObserver( os.path.join(testDataRoot, "baseline/feature_branch"), os.path.join(testDataRoot, "actualdump/feature_branch")) await analyzer.analyze(from_beginning=True) diff --git a/test/test_analytics/utility/__init__.py b/test/test_analytics/utility/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test/test_analytics/utility/graph_baseline.py b/test/test_analytics/utility/graph_baseline.py new file mode 100644 index 00000000000..d45731b3625 --- /dev/null +++ b/test/test_analytics/utility/graph_baseline.py @@ -0,0 +1,133 @@ +""" +Utility functions for graph-dump-based regression tests. 
+""" +import json +import logging +import os +from pathlib import Path +from enum import Enum + +from git import Commit +from networkx import Graph + +from persper.analytics.analyzer import Analyzer, AnalyzerObserver +from persper.analytics.call_commit_graph import CallCommitGraph + +_logger = logging.getLogger() + +testDataRoot = os.path.dirname(os.path.abspath(__file__)) + + +def formatEdgeId(u: str, v: str): + return u + "|->|" + v + + +def graphToDict(ccg: CallCommitGraph): + result = { + "nodes": dict(ccg.nodes(data=True)), + "edges": dict(((formatEdgeId(u, v), data) for (u, v, data) in ccg.edges(data=True))) + } + return result + + +def fixGraphDict(graphData: dict): + if "nodes" in graphData: + for id, attr in graphData["nodes"].items(): + if "history" in attr: + attr["history"] = dict((int(k), v) + for k, v in attr["history"].items()) + return graphData + + +def assertGraphMatches(baseline: dict, ccg: CallCommitGraph): + baselineNodeIds = set(baseline["nodes"].keys()) + for id, attr in ccg.nodes(data=True): + baselineAttr = baseline["nodes"].get(id, None) + assert baselineAttr != None, str.format("Extra node: {0}.", id) + assert baselineAttr == attr, str.format( + "Node attribute mismatch: {0}. Baseline: {1}; Actual: {2}.", id, baselineAttr, attr) + baselineNodeIds.remove(id) + assert not baselineNodeIds, str.format( + "Node(s) missing: %s.", baselineNodeIds) + baselineEdgeIds = set(baseline["edges"].keys()) + for u, v, attr in ccg.edges(data=True): + id = formatEdgeId(u, v) + baselineAttr = baseline["edges"].get(id, None) + assert baselineAttr != None, str.format("Extra branch: {0}.", id) + assert baselineAttr == attr, str.format( + "Branch attribute mismatch: {0}. 
Baseline: {1}; Actual: {2}.", id, baselineAttr, attr) + baselineEdgeIds.remove(id) + assert not baselineEdgeIds, str.format( + "Branch(es) missing: {0}.", baselineEdgeIds) + + +class GraphDumpNamingRule(Enum): + CommitMessage = 0, + CommitHexSha = 1 + + +class GraphDumpAnalyzerObserver(AnalyzerObserver): + """ + An implementation of AnalyzerObserver that generates graph dump after each commit, + and/or asserts the generated graph is the same as baseline graph dump. + """ + + def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = None, + dumpOnlyOnError: bool = None, dumpNaming: GraphDumpNamingRule = GraphDumpNamingRule.CommitHexSha): + """ + Params: + graphBaselineDumpPath: root folder of the baseline graph dump files. Set to values other than `None` + to perform baseline assertions after each commit. + graphTestDumpPath: root folder to persist graph dump of observed Analyzer after each commit. This is + also the root folder to dump current graph if baseline assertion fails in any commit. + dumpOnlyOnError: True: dump current graph in Analyzer only when baseline assertion fails. + False: dump current graph in Analyzer after each commit. + None: if graphBaselineDumpPath == None, same as True; otherwise, same as False. + dumpNaming: specify how to name the graph dump files. + Remarks: + Set `graphBaselineDumpPath` to `None` to generate graph dump files in the folder specified in `graphTestDumpPath`, + which can be used as `graphBaselineDumpPath` in the next run. 
+ """ + super().__init__() + if graphBaselineDumpPath: + self._baselinePath = Path(graphBaselineDumpPath).resolve() + else: + self._baselinePath = None + if graphTestDumpPath: + self._dumpPath = Path(graphTestDumpPath).resolve() + self._dumpPath.mkdir(parents=True, exist_ok=True) + else: + self._dumpPath = None + self._dumpOnlyOnError = graphBaselineDumpPath != None if dumpOnlyOnError == None else dumpOnlyOnError + + def onAfterCommit(self, analyzer: Analyzer, index: int, commit: Commit, isMaster: bool): + graph: CallCommitGraph = analyzer.get_graph() + + def dumpGraph(warnIfNotAvailable: bool): + if not self._dumpPath: + if warnIfNotAvailable: + _logger.warning( + "Cannot dump call commit graph because no dump path has been specified. Commit %s: %s.", commit.hexsha, commit.message) + return False + data = graphToDict(graph) + graphPath = self._dumpPath.joinpath( + commit.message.strip() + ".g.json") + with open(graphPath, "wt") as f: + json.dump(data, f, sort_keys=True, indent=4) + return True + # check baseline for regression + if self._baselinePath: + try: + graphPath = self._baselinePath.joinpath( + commit.message.strip() + ".g.json") + baselineData: dict = None + with open(graphPath, "rt") as f: + baselineData = fixGraphDict(json.load(f)) + assertGraphMatches(baselineData, graph) + except: + _logger.error("Failed on commit %s: %s.", + commit.hexsha, commit.message) + dumpGraph(True) + raise + if not self._dumpOnlyOnError: + dumpGraph(False) From 9dc5f75d54b8257bbfce4ed734bc96a27a51ccdd Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 28 Jan 2019 23:38:18 +0800 Subject: [PATCH 30/84] Add unit test for ccls on cpp_test_repo. 
--- .../actualdump/cpp_test_repo/A.g.json | 314 +++++++++++++++++ .../actualdump/cpp_test_repo/B.g.json | 319 +++++++++++++++++ .../actualdump/cpp_test_repo/C.g.json | 323 +++++++++++++++++ .../actualdump/cpp_test_repo/D.g.json | 327 ++++++++++++++++++ test/test_analytics/test_analyzer_lsp_ccls.py | 11 + 5 files changed, 1294 insertions(+) create mode 100644 test/test_analytics/actualdump/cpp_test_repo/A.g.json create mode 100644 test/test_analytics/actualdump/cpp_test_repo/B.g.json create mode 100644 test/test_analytics/actualdump/cpp_test_repo/C.g.json create mode 100644 test/test_analytics/actualdump/cpp_test_repo/D.g.json diff --git a/test/test_analytics/actualdump/cpp_test_repo/A.g.json b/test/test_analytics/actualdump/cpp_test_repo/A.g.json new file mode 100644 index 00000000000..a44e165a511 --- /dev/null +++ b/test/test_analytics/actualdump/cpp_test_repo/A.g.json @@ -0,0 +1,314 @@ +{ + "edges": { + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 0, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": 0, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + "addedBy": 0, + "weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "addedBy": 0, + "weight": null + }, + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": 0, + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, bool 
&rhs)|->|enum class StringComparison : int {}": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { + "addedBy": 0, + "weight": null + }, + "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "addedBy": 0, + "weight": null + } + }, + "nodes": { + "ANSI_COLOR_BLUE": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_BRIGHT": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_CYAN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_GREEN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_MAGENTA": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RED": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RESET": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_YELLOW": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)": { + "history": { + "0": 1 + }, + "size": 
null + }, + "TDest safe_cast(TSrc obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream &operator>>(TStream &s, RowReader &reader)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream OpenAndValidate(const TPath arg1)": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM_DEFAULT": { + "history": { + "0": 1 + }, + "size": null + }, + "_RE_TRACE": { + "history": { + "0": 1 + }, + "size": null + }, + "_RangeToEnumerable RangeToEnumerable(const int range)": { + "history": { + "0": 1 + }, + "size": null + }, + "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Confirm(const int &prompt)": { + "history": { + "0": 17 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "history": { + "0": 29 + }, + "size": null + }, + "bool dynamic_kind_of(const TSrc *obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool pointer_kind_of(const int obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf()": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const std::type_index &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const type_info &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "enum class StringComparison : int {}": { + "history": { + "0": 6 + }, + "size": null + }, + "inline int to_string(const int &value)": { + "history": { + "0": 4 + }, + "size": null + }, + "int &operator>>(int &reader, bool &rhs)": { + "history": { + "0": 24 + }, + "size": null + }, + "int &operator>>(int &reader, double 
&rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, float &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, int &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, long &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int StreamStatusToString(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + }, + "int main(int argc, char *argv[])": { + "history": { + "0": 7 + }, + "size": null + }, + "int safe_pointer_cast(const int &obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "void ReportException(const int &ex, int level)": { + "history": { + "0": 21 + }, + "size": null + }, + "void ValidateStream(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/actualdump/cpp_test_repo/B.g.json b/test/test_analytics/actualdump/cpp_test_repo/B.g.json new file mode 100644 index 00000000000..7e0f202ba82 --- /dev/null +++ b/test/test_analytics/actualdump/cpp_test_repo/B.g.json @@ -0,0 +1,319 @@ +{ + "edges": { + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 0, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": 0, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + "addedBy": 1, + "weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "addedBy": 0, + "weight": null + }, + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": 0, + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char 
*FriendlyNameOf()": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 0, + "weight": null + }, + "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { + "addedBy": 1, + "weight": null + }, + "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { + "addedBy": 1, + "weight": null + }, + "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "addedBy": 1, + "weight": null + } + }, + "nodes": { + "ANSI_COLOR_BLUE": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_BRIGHT": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_CYAN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_GREEN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_MAGENTA": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RED": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RESET": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_YELLOW": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, double 
&rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "TDest safe_cast(TSrc obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream &operator>>(TStream &s, RowReader &reader)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream OpenAndValidate(const TPath arg1)": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM_DEFAULT": { + "history": { + "0": 1 + }, + "size": null + }, + "_RE_TRACE": { + "history": { + "0": 1 + }, + "size": null + }, + "_RangeToEnumerable RangeToEnumerable(const int range)": { + "history": { + "0": 1 + }, + "size": null + }, + "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Confirm(const int &prompt)": { + "history": { + "0": 17 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "history": { + "0": 29 + }, + "size": null + }, + "bool dynamic_kind_of(const TSrc *obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool pointer_kind_of(const int obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf()": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const std::type_index &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const type_info &type)": { + 
"history": { + "0": 1 + }, + "size": null + }, + "enum class StringComparison : int {}": { + "history": { + "0": 6 + }, + "size": null + }, + "inline int to_string(const int &value)": { + "history": { + "0": 4 + }, + "size": null + }, + "int &operator>>(int &reader, bool &rhs)": { + "history": { + "0": 24 + }, + "size": null + }, + "int &operator>>(int &reader, double &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, float &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, int &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, long &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int StreamStatusToString(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + }, + "int main(int argc, char *argv[])": { + "history": { + "0": 7, + "1": 4 + }, + "size": null + }, + "int safe_pointer_cast(const int &obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "void ReportException(const int &ex, int level)": { + "history": { + "0": 21 + }, + "size": null + }, + "void ValidateStream(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/actualdump/cpp_test_repo/C.g.json b/test/test_analytics/actualdump/cpp_test_repo/C.g.json new file mode 100644 index 00000000000..73cb945c458 --- /dev/null +++ b/test/test_analytics/actualdump/cpp_test_repo/C.g.json @@ -0,0 +1,323 @@ +{ + "edges": { + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 2, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": 2, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + "addedBy": 2, + 
"weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "addedBy": 2, + "weight": null + }, + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": 2, + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "addedBy": 2, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 2, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": 2, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 2, + "weight": null + }, + "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 2, + "weight": null + }, + "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 2, + "weight": null + }, + "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 2, + "weight": null + }, + "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { + "addedBy": 2, + "weight": null + }, + "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { + "addedBy": 1, + "weight": null + }, + "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "addedBy": 2, + "weight": null + } + }, + "nodes": { + "ANSI_COLOR_BLUE": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_BRIGHT": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_CYAN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_GREEN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_MAGENTA": { + "history": { + "0": 1 + }, + "size": null + }, + 
"ANSI_COLOR_RED": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RESET": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_YELLOW": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "TDest safe_cast(TSrc obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream &operator>>(TStream &s, RowReader &reader)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream OpenAndValidate(const TPath arg1)": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM_DEFAULT": { + "history": { + "0": 1 + }, + "size": null + }, + "_RE_TRACE": { + "history": { + "0": 1 + }, + "size": null + }, + "_RangeToEnumerable RangeToEnumerable(const int range)": { + "history": { + "0": 1 + }, + "size": null + }, + "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Confirm(const int &prompt)": { + "history": { + "0": 17 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "history": { + "0": 29, + "2": 2 + }, + "size": null + }, + "bool dynamic_kind_of(const TSrc *obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool pointer_kind_of(const int obj)": { + "history": { + "0": 1 + }, + "size": null + 
}, + "const char *FriendlyNameOf()": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const std::type_index &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const type_info &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "enum class StringComparison : int {}": { + "history": { + "0": 6, + "2": 2 + }, + "size": null + }, + "inline int to_string(const int &value)": { + "history": { + "0": 4 + }, + "size": null + }, + "int &operator>>(int &reader, bool &rhs)": { + "history": { + "0": 24, + "2": 4 + }, + "size": null + }, + "int &operator>>(int &reader, double &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, float &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, int &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, long &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int StreamStatusToString(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + }, + "int main(int argc, char *argv[])": { + "history": { + "0": 7, + "1": 4, + "2": 4 + }, + "size": null + }, + "int safe_pointer_cast(const int &obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "void ReportException(const int &ex, int level)": { + "history": { + "0": 21 + }, + "size": null + }, + "void ValidateStream(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/actualdump/cpp_test_repo/D.g.json b/test/test_analytics/actualdump/cpp_test_repo/D.g.json new file mode 100644 index 00000000000..32fc9162859 --- /dev/null +++ b/test/test_analytics/actualdump/cpp_test_repo/D.g.json @@ -0,0 +1,327 @@ +{ + "edges": { + "bool Equal(const int &lhs, const int &rhs, 
StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 3, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": 3, + "weight": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + "addedBy": 3, + "weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "addedBy": 3, + "weight": null + }, + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": 3, + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "addedBy": 3, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "addedBy": 3, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": 3, + "weight": null + }, + "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 3, + "weight": null + }, + "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 3, + "weight": null + }, + "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 3, + "weight": null + }, + "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "addedBy": 3, + "weight": null + }, + "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { + "addedBy": 2, + "weight": null + }, + "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { + "addedBy": 1, + "weight": null + }, + "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "addedBy": 3, + 
"weight": null + } + }, + "nodes": { + "ANSI_COLOR_BLUE": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_BRIGHT": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_CYAN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_GREEN": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_MAGENTA": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RED": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_RESET": { + "history": { + "0": 1 + }, + "size": null + }, + "ANSI_COLOR_YELLOW": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)": { + "history": { + "0": 1 + }, + "size": null + }, + "TDest safe_cast(TSrc obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream &operator>>(TStream &s, RowReader &reader)": { + "history": { + "0": 1 + }, + "size": null + }, + "TStream OpenAndValidate(const TPath arg1)": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM": { + "history": { + "0": 1 + }, + "size": null + }, + "_DECLARE_ENUM_DEFAULT": { + "history": { + "0": 1 + }, + "size": null + }, + "_RE_TRACE": { + "history": { + "0": 1 + }, + "size": null + }, + "_RangeToEnumerable RangeToEnumerable(const int range)": { + "history": { + "0": 1 + }, + "size": null + }, + "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Confirm(const int &prompt)": { + "history": { + "0": 17 + }, + "size": null + }, + "bool Equal(const int &lhs, 
const int &rhs, StringComparison comparision)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "history": { + "0": 29, + "2": 2, + "3": 2 + }, + "size": null + }, + "bool dynamic_kind_of(const TSrc *obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "bool pointer_kind_of(const int obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf()": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const std::type_index &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "const char *FriendlyNameOf(const type_info &type)": { + "history": { + "0": 1 + }, + "size": null + }, + "enum class StringComparison : int {}": { + "history": { + "0": 6, + "2": 2, + "3": 2 + }, + "size": null + }, + "inline int to_string(const int &value)": { + "history": { + "0": 4 + }, + "size": null + }, + "int &operator>>(int &reader, bool &rhs)": { + "history": { + "0": 24, + "2": 4, + "3": 4 + }, + "size": null + }, + "int &operator>>(int &reader, double &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, float &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, int &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int &operator>>(int &reader, long &rhs)": { + "history": { + "0": 6 + }, + "size": null + }, + "int StreamStatusToString(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + }, + "int main(int argc, char *argv[])": { + "history": { + "0": 7, + "1": 4, + "2": 4, + "3": 7 + }, + "size": null + }, + "int safe_pointer_cast(const int &obj)": { + "history": { + "0": 1 + }, + "size": null + }, + "void ReportException(const int &ex, int level)": { + "history": { + "0": 21 + }, + "size": null + }, + "void 
ValidateStream(const TStream &stream)": { + "history": { + "0": 1 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index cfa2ef46a51..d4831cd422c 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -56,3 +56,14 @@ async def testFeatureBranch(): os.path.join(testDataRoot, "baseline/feature_branch"), os.path.join(testDataRoot, "actualdump/feature_branch")) await analyzer.analyze(from_beginning=True) + +@pytest.mark.asyncio +async def testCppTestRepo(): + graphServer, analyzer = await createFeatureBranchAnalyzer("cpp_test_repo") + graphServer: CclsGraphServer + analyzer: Analyzer + async with graphServer: + analyzer.observer = GraphDumpAnalyzerObserver( + os.path.join(testDataRoot, "baseline/cpp_test_repo"), + os.path.join(testDataRoot, "actualdump/cpp_test_repo")) + await analyzer.analyze(from_beginning=True) From 4e78e9ff2909978fb136eaf68793fe62a360489a Mon Sep 17 00:00:00 2001 From: xinyan Date: Fri, 15 Feb 2019 22:40:21 +0800 Subject: [PATCH 31/84] Add analyzer2.Analyzer that is agnostic to commit topology. Get tests working for master-branch only scenario. 
--- persper/analytics/analyzer2.py | 192 ++++++++++++++++++ persper/analytics/git_tools.py | 12 +- persper/analytics/graph_server.py | 15 +- test/test_analytics/test_analyzer_lsp_ccls.py | 33 +-- test/test_analytics/utility/graph_baseline.py | 11 +- 5 files changed, 241 insertions(+), 22 deletions(-) create mode 100644 persper/analytics/analyzer2.py diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py new file mode 100644 index 00000000000..5427fa665df --- /dev/null +++ b/persper/analytics/analyzer2.py @@ -0,0 +1,192 @@ +import asyncio +from abc import ABC +from typing import Union + +from git import Commit, Diff, DiffIndex, Repo + +from persper.analytics.git_tools import EMPTY_TREE_SHA, diff_with_commit, get_contents +from persper.analytics.graph_server import CommitSeekingMode, GraphServer + + +class Analyzer: + def __init__(self, repositoryRoot: str, graphServer: GraphServer, + terminalCommit: str = "master", + firstParentOnly: bool = False): + self._repositoryRoot = repositoryRoot + self._graphServer = graphServer + self._repo = Repo(repositoryRoot) + self._terminalCommit: Commit = self._repo.rev_parse(terminalCommit) + self._firstParentOnly = firstParentOnly + self._visitedCommits = set() + self._observer: AnalyzerObserver = emptyAnalyzerObserver + + @property + def observer(self): + """ + The AnalyzerObserver used to observe current Analyzer. 
+ """ + return self._observer + + @observer.setter + def observer(self, value): + self._observer = value or emptyAnalyzerObserver + + @property + def terminalCommit(self): + return self._terminalCommit + + @property + def graph(self): + return self._graphServer.get_graph() + + async def analyze(self): + graphServerLastCommit = EMPTY_TREE_SHA + for commit in self._repo.iter_commits(self._terminalCommit, + topo_order=True, reverse=True, first_parent=self._firstParentOnly): + def printCommitStatus(status: str): + message = commit.message.strip()[:32] + print("Commit {0} ({1}): {2}".format(commit.hexsha, message, status)) + if commit.hexsha in self._visitedCommits: + printCommitStatus("Already visited.") + continue + if len(commit.parents) > 1: + # merge commit + # process connection, do not process diff + printCommitStatus("Going forward (merge).") + if self._firstParentOnly: + assert graphServerLastCommit == commit.parents[0].hexsha, \ + "git should traverse along first parent, but actually not." + await self._analyzeCommit(commit, graphServerLastCommit, CommitSeekingMode.NormalForward) + else: + await self._analyzeCommit(commit, graphServerLastCommit, CommitSeekingMode.MergeCommit) + elif not commit.parents: + printCommitStatus("Going forward (initial commit).") + await self._analyzeCommit(commit, None, CommitSeekingMode.NormalForward) + else: + parent: Commit = commit.parents[0] + if graphServerLastCommit != parent.hexsha: + printCommitStatus("Rewind to parent: {0}.".format(parent.hexsha)) + # jumping to the parent commit first + await self._analyzeCommit(parent, graphServerLastCommit, CommitSeekingMode.Rewind) + # then go on with current commit + printCommitStatus("Going forward.") + await self._analyzeCommit(commit, parent, CommitSeekingMode.NormalForward) + graphServerLastCommit = commit.hexsha + + async def _analyzeCommit(self, commit: Union[Commit, str], parentCommit: Union[Commit, str], + seekingMode: CommitSeekingMode): + """ + parentCommit can be None. 
+ """ + if type(commit) != Commit: + commit = self._repo.commit(commit) + self._observer.onBeforeCommit(self, commit, seekingMode) + result = self._graphServer.start_commit(commit.hexsha, seekingMode, + commit.author.name, commit.author.email, commit.message) + if asyncio.iscoroutine(result): + await result + diff_index = diff_with_commit(commit, parentCommit) + + for diff in diff_index: + old_fname, new_fname = _get_fnames(diff) + # apply filter + # if a file comes into/goes from our view, we will set corresponding old_fname/new_fname to None, + # as if the file is introduced/removed in this commit. + # However, the diff will keep its original, no matter if the file has been filtered in/out. + if old_fname and not self._graphServer.filter_file(old_fname): + old_fname = None + if new_fname and not self._graphServer.filter_file(new_fname): + new_fname = None + if not old_fname and not new_fname: + # no modification + continue + + old_src = new_src = None + + if old_fname: + old_src = get_contents(self._repo, parentCommit, old_fname) + + if new_fname: + new_src = get_contents(self._repo, commit, new_fname) + + if old_src or new_src: + result = self._graphServer.update_graph( + old_fname, old_src, new_fname, new_src, diff.diff) + if asyncio.iscoroutine(result): + await result + + result = self._graphServer.end_commit(commit.hexsha) + if asyncio.iscoroutine(result): + await result + self._observer.onAfterCommit(self, commit, seekingMode) + + +def _get_fnames(diff: Diff): + if diff.new_file: + # change type 'A' + old_fname = None + new_fname = diff.b_blob.path + elif diff.deleted_file: + # change type 'D' + old_fname = diff.a_blob.path + new_fname = None + elif diff.renamed: + # change type 'R' + old_fname = diff.rename_from + new_fname = diff.rename_to + elif (diff.a_blob and diff.b_blob and + (diff.a_blob != diff.b_blob)): + # change type 'M' + old_fname = new_fname = diff.b_blob.path + else: + # change type 'U' + return None, None + + return old_fname, new_fname + + 
+class AnalyzerObserver(ABC): + """ + Used to observe the progress of `Analyzer` during its analysis of the target repository. + You need to derive your own observer class from it before assigning your observer instance + to `Analyzer.observer`. + """ + + def __init__(self): + pass + + def onBeforeCommit(self, analyzer: Analyzer, commit: Commit, seeking_mode: CommitSeekingMode): + """ + Called before the observed Analyzer is about to analyze a commit. + Params: + analyzer: the observed Analyzer instance. + index: the index of the commit, depending on the behavior of the analyzer. + This is usually a series of 1-based ordinal index for master commits, + and another series of 1-based ordinal index for branch commits. + commit: the commit to be analyzed. + isMaster: whether the current commit is one of the master commits. + """ + pass + + def onAfterCommit(self, analyzer: Analyzer, commit: Commit, seeking_mode: CommitSeekingMode): + """ + Called after the observed Analyzer has finished analyzing a commit. + Params: + analyzer: the observed Analyzer instance. + index: the index of the commit, depending on the behavior of the analyzer. + This is usually a series of 1-based ordinal index for master commits, + and another series of 1-based ordinal index for branch commits. + commit: the commit that has just been analyzed. + isMaster: whether the current commit is one of the master commits. + """ + pass + + +class _EmptyAnalyzerObserverType(AnalyzerObserver): + pass + + +emptyAnalyzerObserver = _EmptyAnalyzerObserverType() +""" +An AnalyzerObserver instance that does nothing in their notification methods. 
+""" diff --git a/persper/analytics/git_tools.py b/persper/analytics/git_tools.py index 716f80e3156..42401caa159 100644 --- a/persper/analytics/git_tools.py +++ b/persper/analytics/git_tools.py @@ -1,5 +1,6 @@ from git.exc import InvalidGitRepositoryError, NoSuchPathError -from git import Repo +from git import Repo, Commit +from typing import Union import sys EMPTY_TREE_SHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904' @@ -14,6 +15,11 @@ def _diff_with_first_parent(commit): return commit.diff(prev_commit, create_patch=True, R=True, indent_heuristic=True) +def diff_with_commit(current_commit:Commit, base_commit:Union[Commit, str]=None): + localBaseCommit = base_commit + if not localBaseCommit: + localBaseCommit = EMPTY_TREE_SHA + return current_commit.diff(localBaseCommit, create_patch=True, R=True, indent_heuristic=True) def initialize_repo(repo_path): try: @@ -29,4 +35,6 @@ def initialize_repo(repo_path): def get_contents(repo, commit, path): """Get contents of a path within a specific commit""" - return repo.git.show('{}:{}'.format(commit.hexsha, path)) + if type(commit) == Commit: + commit = commit.hexsha + return repo.git.show('{}:{}'.format(commit, path)) diff --git a/persper/analytics/graph_server.py b/persper/analytics/graph_server.py index b10b620b27c..41b45f708c1 100644 --- a/persper/analytics/graph_server.py +++ b/persper/analytics/graph_server.py @@ -1,5 +1,6 @@ from abc import ABC from abc import abstractmethod +from enum import Enum JS_FILENAME_REGEXES = [ r'.+\.js$', @@ -25,6 +26,12 @@ r'.+\.go$' ] +class CommitSeekingMode: + NormalForward = 0, + MergeCommit = 1, + Rewind = 2 + + class GraphServer(ABC): @abstractmethod @@ -48,7 +55,13 @@ def update_graph(self, old_filename: str, old_src: str, """ pass - def end_commit(self, hexsha): + def start_commit(self, hexsha: str, seeking_mode: CommitSeekingMode, author_name: str, + author_email: str, commit_message: str): + # default implementation for backwards compatibility + if seeking_mode == 
CommitSeekingMode.NormalForward: + self.register_commit(hexsha, author_name, author_email, commit_message) + + def end_commit(self, hexsha: str): pass @abstractmethod diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index d4831cd422c..c2b5c223c9b 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -12,7 +12,7 @@ from networkx import Graph from networkx.algorithms.isomorphism import is_isomorphic -from persper.analytics.analyzer import Analyzer +from persper.analytics.analyzer2 import Analyzer from persper.analytics.call_commit_graph import CallCommitGraph from persper.analytics.lsp_graph_server.ccls import CclsGraphServer from persper.util.path import root_path @@ -22,7 +22,7 @@ testDataRoot = os.path.dirname(os.path.abspath(__file__)) -async def createFeatureBranchAnalyzer(repoName: str): +def prepareRepo(repoName: str): # build the repo first if not exists yet repo_path = os.path.join(root_path, 'repos/' + repoName) script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') @@ -30,7 +30,11 @@ async def createFeatureBranchAnalyzer(repoName: str): if not os.path.isdir(repo_path): cmd = '{} {}'.format(script_path, test_src_path) subprocess.call(cmd, shell=True) + print("Repository path: ", repo_path) + return repo_path + +def createCclsGraphServer(): # create workspace root folder CCLS_COMMAND = os.path.join(root_path, "bin/ccls") DUMP_LOGS = False @@ -40,30 +44,29 @@ async def createFeatureBranchAnalyzer(repoName: str): languageServerCommand=CCLS_COMMAND + (" -log-file=ccls.log" if DUMP_LOGS else ""), dumpLogs=DUMP_LOGS) - print(repo_path) - analyzer = Analyzer(repo_path, graphServer) graphServer.reset_graph() - return graphServer, analyzer + return graphServer @pytest.mark.asyncio async def testFeatureBranch(): - graphServer, analyzer = await createFeatureBranchAnalyzer("test_feature_branch") - graphServer: CclsGraphServer - 
analyzer: Analyzer + repoPath = prepareRepo("test_feature_branch") + graphServer = createCclsGraphServer() + analyzer = Analyzer(repoPath, graphServer, firstParentOnly=True) async with graphServer: analyzer.observer = GraphDumpAnalyzerObserver( - os.path.join(testDataRoot, "baseline/feature_branch"), - os.path.join(testDataRoot, "actualdump/feature_branch")) - await analyzer.analyze(from_beginning=True) + os.path.join(testDataRoot, "baseline/feature_branch_first_parent"), + os.path.join(testDataRoot, "actualdump/feature_branch_first_parent")) + await analyzer.analyze() + @pytest.mark.asyncio async def testCppTestRepo(): - graphServer, analyzer = await createFeatureBranchAnalyzer("cpp_test_repo") - graphServer: CclsGraphServer - analyzer: Analyzer + repoPath = prepareRepo("cpp_test_repo") + graphServer = createCclsGraphServer() + analyzer = Analyzer(repoPath, graphServer) async with graphServer: analyzer.observer = GraphDumpAnalyzerObserver( os.path.join(testDataRoot, "baseline/cpp_test_repo"), os.path.join(testDataRoot, "actualdump/cpp_test_repo")) - await analyzer.analyze(from_beginning=True) + await analyzer.analyze() diff --git a/test/test_analytics/utility/graph_baseline.py b/test/test_analytics/utility/graph_baseline.py index d45731b3625..8c06ae2d68d 100644 --- a/test/test_analytics/utility/graph_baseline.py +++ b/test/test_analytics/utility/graph_baseline.py @@ -4,14 +4,15 @@ import json import logging import os -from pathlib import Path from enum import Enum +from pathlib import Path from git import Commit from networkx import Graph -from persper.analytics.analyzer import Analyzer, AnalyzerObserver +from persper.analytics.analyzer2 import Analyzer, AnalyzerObserver from persper.analytics.call_commit_graph import CallCommitGraph +from persper.analytics.graph_server import CommitSeekingMode _logger = logging.getLogger() @@ -100,8 +101,10 @@ def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = N self._dumpPath = None 
self._dumpOnlyOnError = graphBaselineDumpPath != None if dumpOnlyOnError == None else dumpOnlyOnError - def onAfterCommit(self, analyzer: Analyzer, index: int, commit: Commit, isMaster: bool): - graph: CallCommitGraph = analyzer.get_graph() + def onAfterCommit(self, analyzer: Analyzer, commit: Commit, seeking_mode: CommitSeekingMode): + if seeking_mode == CommitSeekingMode.Rewind: + return + graph: CallCommitGraph = analyzer.graph def dumpGraph(warnIfNotAvailable: bool): if not self._dumpPath: From 9732c711b339393dfc33ced5a5c4a74345a60194 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 16 Feb 2019 22:19:41 +0800 Subject: [PATCH 32/84] Make CallCommitGraph & LSP graph server capable of processing merge commits & rewinds. Optionally use commit hexsha or comment as identifier in call commit graph. --- persper/analytics/analyzer2.py | 39 +++++- persper/analytics/call_commit_graph.py | 30 +++-- persper/analytics/graph_server.py | 10 +- .../analytics/lsp_graph_server/__init__.py | 115 ++++++++++-------- persper/analytics/lsp_graph_server/ccls.py | 18 +-- test/test_analytics/test_analyzer_lsp_ccls.py | 19 ++- test/test_analytics/utility/graph_baseline.py | 3 +- 7 files changed, 156 insertions(+), 78 deletions(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 5427fa665df..c6c4d2727d1 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -15,6 +15,7 @@ def __init__(self, repositoryRoot: str, graphServer: GraphServer, self._repositoryRoot = repositoryRoot self._graphServer = graphServer self._repo = Repo(repositoryRoot) + self._originCommit: Commit = None self._terminalCommit: Commit = self._repo.rev_parse(terminalCommit) self._firstParentOnly = firstParentOnly self._visitedCommits = set() @@ -31,17 +32,51 @@ def observer(self): def observer(self, value): self._observer = value or emptyAnalyzerObserver + @property + def originCommit(self): + """ + Gets/sets the first commit to visit. 
(exclusive) + Use None to start visiting from the first commit. + """ + return self._originCommit + + @originCommit.setter + def originCommit(self, value: Union[Commit, str]): + self._originCommit = self._repo.rev_parse(value) if value else None + @property def terminalCommit(self): + """ + Gets/sets the last commit to visit. (inclusive) + """ return self._terminalCommit + @terminalCommit.setter + def terminalCommit(self, value: Union[Commit, str]): + self._terminalCommit = self._repo.rev_parse(value) + + @property + def firstParentOnly(self): + """ + Whether to only visit each commit's first parent. + This is useful if you are only interested in the topical branch. + """ + return self._firstParentOnly + + @firstParentOnly.setter + def firstParentOnly(self, value: bool): + self._firstParentOnly = value + @property def graph(self): return self._graphServer.get_graph() async def analyze(self): graphServerLastCommit = EMPTY_TREE_SHA - for commit in self._repo.iter_commits(self._terminalCommit, + commitSpec = self._terminalCommit + if self._originCommit: + commitSpec = self._originCommit.hexsha + ".." + self._terminalCommit.hexsha + for commit in self._repo.iter_commits(commitSpec, topo_order=True, reverse=True, first_parent=self._firstParentOnly): def printCommitStatus(status: str): message = commit.message.strip()[:32] @@ -74,7 +109,7 @@ def printCommitStatus(status: str): graphServerLastCommit = commit.hexsha async def _analyzeCommit(self, commit: Union[Commit, str], parentCommit: Union[Commit, str], - seekingMode: CommitSeekingMode): + seekingMode: CommitSeekingMode): """ parentCommit can be None. 
""" diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index e2ad3d367d5..967ccb805f3 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -14,13 +14,29 @@ def normalize(devranks): return normalized_devranks +class CommitIdGenerators: + @staticmethod + def fromOrdinal(ordinal: int, hexsha: str, message: str): + return ordinal + + @staticmethod + def fromComment(ordinal: int, hexsha: str, message: str): + return message.strip() + + @staticmethod + def fromHexsha(ordinal: int, hexsha: str, message: str): + return hexsha + + class CallCommitGraph: - def __init__(self, node_link_data=None): + def __init__(self, node_link_data=None, commit_id_generator=CommitIdGenerators.fromOrdinal): if node_link_data: self._digraph = json_graph.node_link_graph(node_link_data) else: self._digraph = nx.DiGraph(commitList=[]) + self._commit_id_generator = commit_id_generator + self._current_commit_id = None # Read-only access def nodes(self, data=False): @@ -36,7 +52,9 @@ def commits(self): return self._digraph.graph['commitList'] def add_commit(self, hexsha, author_name, author_email, commit_message): + self._current_commit_id = self._commit_id_generator(self._cur_cindex(), hexsha, commit_message) self._digraph.graph['commitList'].append({ + 'id': self._current_commit_id, 'hexsha': hexsha, 'authorName': author_name, 'authorEmail': author_email, 'message': commit_message }) @@ -57,18 +75,16 @@ def add_node(self, node): # add_node must be called on source and target first def add_edge(self, source, target): self._digraph.add_edge(source, target, - addedBy=self._cur_cindex(), + addedBy=self._current_commit_id, weight=None) def update_node_history(self, node, size): - # Use current commit index - cc_idx = self._cur_cindex() node_history = self._get_node_history(node) # A commit might update a node's history more than once - if cc_idx in node_history: - node_history[cc_idx] += size + if 
self._current_commit_id in node_history: + node_history[self._current_commit_id] += size else: - node_history[cc_idx] = size + node_history[self._current_commit_id] = size # read/write access to node history are thourgh this function def _get_node_history(self, node): diff --git a/persper/analytics/graph_server.py b/persper/analytics/graph_server.py index 41b45f708c1..d685b49ad1d 100644 --- a/persper/analytics/graph_server.py +++ b/persper/analytics/graph_server.py @@ -26,20 +26,22 @@ r'.+\.go$' ] -class CommitSeekingMode: - NormalForward = 0, - MergeCommit = 1, + +class CommitSeekingMode(Enum): + NormalForward = 0 + MergeCommit = 1 Rewind = 2 class GraphServer(ABC): - @abstractmethod def register_commit(self, hexsha, author_name, author_email, commit_message): """ + Deprecated. Use start_commit instead. :return: a status code, success or failure """ + raise NotImplementedError() @abstractmethod def update_graph(self, old_filename: str, old_src: str, diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 6d4745559d5..c94f8f7d002 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -9,7 +9,7 @@ from typing import Dict, List, Tuple, Union from persper.analytics.call_commit_graph import CallCommitGraph -from persper.analytics.graph_server import GraphServer +from persper.analytics.graph_server import GraphServer, CommitSeekingMode from persper.analytics.another_patch_parser import parseUnifiedDiff from .callgraph import CallGraphScope @@ -37,7 +37,8 @@ class LspClientGraphServer(GraphServer): def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[str]] = None, dumpLogs: bool = False, - dumpGraphs: bool = False): + dumpGraphs: bool = False, + graph: CallCommitGraph = None): """ workspaceRoot: root of the temporary workspace path. LSP workspace and intermediate repository files will be placed in this folder. 
@@ -46,7 +47,7 @@ def __init__(self, workspaceRoot: str, language server process. If use `null` or default value, the value of current class's `defaultLanguageServerCommand` static field will be used. """ - self._ccgraph = CallCommitGraph() + self._ccgraph = graph or CallCommitGraph() self._callGraph = CallCommitGraphSynchronizer(self._ccgraph) self._workspaceRoot: Path = Path(workspaceRoot).resolve() self._invalidatedFiles = set() @@ -67,7 +68,8 @@ def __init__(self, workspaceRoot: str, self._dumpGraphs = dumpGraphs # [(oldPath, newPath, addedLines, removedLines), ...] # added/removedLines := [[startLine, modifiedLines], ...] - self._stashedPatches:List[Tuple[ PurePath, PurePath, List[Tuple[int, int]], List[Tuple[int, int]] ]] = [] + self._stashedPatches: List[Tuple[PurePath, PurePath, List[Tuple[int, int]], List[Tuple[int, int]]]] = [] + self._commitSeekingMode: CommitSeekingMode = None def __getstate__(self): state = self.__dict__.copy() @@ -82,33 +84,39 @@ def __setstate__(self, state): if not self._workspaceRoot.exists(): self._workspaceRoot.touch() - def register_commit(self, hexsha, author_name, author_email, commit_message): - self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) + def start_commit(self, hexsha: str, seeking_mode: CommitSeekingMode, author_name: str, + author_email: str, commit_message: str): + self._commitSeekingMode = seeking_mode + if seeking_mode != CommitSeekingMode.Rewind: + self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) async def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): oldPath = self._workspaceRoot.joinpath(old_filename).resolve() if old_filename else None newPath = self._workspaceRoot.joinpath(new_filename).resolve() if new_filename else None assert oldPath or newPath - if newPath is None: - # The file has been deleted - # We need to scan it before it's gone, instead of in end_commit - self._markWholeDocumentAsChanged(await 
self._callGraphBuilder.getTokenizedDocument(oldPath)) - elif oldPath is None: - # The file has been added - self._stashedPatches.append((oldPath, newPath, None, None)) - else: - added, removed = parseUnifiedDiff(patch.decode('utf-8', 'replace')) - # calculate removed lines - if removed: - # we can have removed lines only when we have old file - oldDoc:TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(oldPath) - # start, end are inclusive, 1-based - for start, end in removed: - for i in range(start - 1, end): - scope = oldDoc.scopeAt(i, 0) - if scope: - self._safeUpdateNodeHistory(scope.name, 1) - self._stashedPatches.append((oldPath, newPath, added, None)) + + # update node history + if self._commitSeekingMode == CommitSeekingMode.NormalForward: + if newPath is None: + # The file has been deleted + # We need to scan it before it's gone, instead of in end_commit + self._markWholeDocumentAsChanged(await self._callGraphBuilder.getTokenizedDocument(oldPath)) + elif oldPath is None: + # The file has been added + self._stashedPatches.append((oldPath, newPath, None, None)) + else: + added, removed = parseUnifiedDiff(patch.decode('utf-8', 'replace')) + # calculate removed lines + if removed: + # we can have removed lines only when we have old file + oldDoc: TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(oldPath) + # start, end are inclusive, 1-based + for start, end in removed: + for i in range(start - 1, end): + scope = oldDoc.scopeAt(i, 0) + if scope: + self._safeUpdateNodeHistory(scope.name, 1) + self._stashedPatches.append((oldPath, newPath, added, None)) # perform file operations if oldPath and oldPath != newPath: @@ -121,12 +129,12 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, self._invalidatedFiles.add(newPath) self._lastFileWrittenTime = datetime.now() - def _safeUpdateNodeHistory(self, name:str, changeOfLines:int): + def _safeUpdateNodeHistory(self, name: str, changeOfLines: int): if 
name not in self._ccgraph.nodes(): self._ccgraph.add_node(name) self._ccgraph.update_node_history(name, changeOfLines) - def _markWholeDocumentAsChanged(self, doc:TokenizedDocument): + def _markWholeDocumentAsChanged(self, doc: TokenizedDocument): parentScopes = [] # print("_markWholeDocumentAsChanged: ", doc.fileName) for scope in doc.scopes: @@ -157,32 +165,33 @@ def _markWholeDocumentAsChanged(self, doc:TokenizedDocument): self._safeUpdateNodeHistory(s.name, c) async def end_commit(self, hexsha): - # calculate lines of change in functions # update vetices & edges - await self.updateGraph() - if self._dumpGraphs: - self._callGraph.dumpTo("Graph-" + hexsha + ".txt") - + if self._commitSeekingMode != CommitSeekingMode.Rewind: + await self.updateGraph() + if self._dumpGraphs: + self._callGraph.dumpTo("Graph-" + hexsha + ".txt") + # calculate added lines - for oldPath, newPath, added, _ in self._stashedPatches: - if not newPath: - continue - if oldPath and not added: - continue - newDoc:TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(newPath) - if not oldPath: - # file has been added - self._markWholeDocumentAsChanged(newDoc) - else: - assert added - for start, end in added: - for i in range(start - 1, end): - scope = newDoc.scopeAt(i, 0) - if scope: - self._safeUpdateNodeHistory(scope.name, 1) + if self._commitSeekingMode == CommitSeekingMode.NormalForward: + for oldPath, newPath, added, _ in self._stashedPatches: + if not newPath: + continue + if oldPath and not added: + continue + newDoc: TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(newPath) + if not oldPath: + # file has been added + self._markWholeDocumentAsChanged(newDoc) + else: + assert added + for start, end in added: + for i in range(start - 1, end): + scope = newDoc.scopeAt(i, 0) + if scope: + self._safeUpdateNodeHistory(scope.name, 1) self._stashedPatches.clear() - _logger.info("End commit: %s", hexsha) - # ensure the files in the next commit has a different 
timestamp as this commit. + + # ensure the files in the next commit has a different timestamp from this commit. if datetime.now() - self._lastFileWrittenTime < timedelta(seconds=1): await asyncio.sleep(1) @@ -268,11 +277,11 @@ async def updateGraph(self): # update vertices # Use scope full name as identifier. for path in affectedFiles: - path:Path + path: Path if not path.exists(): continue for scope in await self._callGraphBuilder.enumScopesInFile(str(path)): - scope:CallGraphScope + scope: CallGraphScope if scope.name not in self._ccgraph.nodes().data(): self._ccgraph.add_node(scope.name) # update edges diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 29a41e8f315..5974c4d7443 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -12,6 +12,8 @@ from jsonrpc.endpoint import Endpoint from jsonrpc.exceptions import JsonRpcException +from persper.analytics.call_commit_graph import CallCommitGraph + from . 
import LspClientGraphServer from .callgraph.builder import CallGraphBuilder from .callgraph.manager import CallGraphManager @@ -24,7 +26,7 @@ class CclsInfo(LspContractObject): - def __init__(self, pendingIndexRequests: int, postIndexWorkItems:int, projectEntries: int): + def __init__(self, pendingIndexRequests: int, postIndexWorkItems: int, projectEntries: int): self.pendingIndexRequests = pendingIndexRequests self.postIndexWorkItems = postIndexWorkItems self.projectEntries = projectEntries @@ -35,8 +37,8 @@ def toDict(self): @staticmethod def fromDict(d: dict): return CclsInfo(int(d["pipeline"]["pendingIndexRequests"]), - 0, - int(d["project"]["entries"])) + 0, + int(d["project"]["entries"])) class CclsLspServerStub(LspServerStub): @@ -103,7 +105,7 @@ def __init__(self, lspClient: CclsLspClient): if not isinstance(lspClient, CclsLspClient): raise TypeError("lspClient should be an instance of CclsLspClient.") super().__init__(lspClient) - self._lspClient:CclsLspClient + self._lspClient: CclsLspClient def createLexer(self, fileStream: FileStream): return CPP14Lexer(fileStream) @@ -120,7 +122,7 @@ def modifyFile(self, fileName: str, newContent: str): async def _waitForJobs(self): lastJobs = None while True: - info:CclsInfo = await self._lspClient.server.cclsInfo() + info: CclsInfo = await self._lspClient.server.cclsInfo() curJobs = info.pendingIndexRequests + info.postIndexWorkItems if curJobs != lastJobs: _logger.debug("Server jobs: %d.", curJobs) @@ -158,8 +160,10 @@ class CclsGraphServer(LspClientGraphServer): def __init__(self, workspaceRoot: str, cacheRoot: str = None, languageServerCommand: Union[str, List[str]] = None, dumpLogs: bool = False, - dumpGraphs: bool = False): - super().__init__(workspaceRoot, languageServerCommand=languageServerCommand, dumpLogs=dumpLogs, dumpGraphs=dumpGraphs) + dumpGraphs: bool = False, + graph: CallCommitGraph = None): + super().__init__(workspaceRoot, languageServerCommand=languageServerCommand, + dumpLogs=dumpLogs, 
dumpGraphs=dumpGraphs, graph=graph) self._cacheRoot = Path(cacheRoot).resolve() if cacheRoot else self._workspaceRoot.joinpath(".ccls-cache") self._c_requireScopeDefinitionMatch = True diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index c2b5c223c9b..f9e7fb9c892 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -13,7 +13,7 @@ from networkx.algorithms.isomorphism import is_isomorphic from persper.analytics.analyzer2 import Analyzer -from persper.analytics.call_commit_graph import CallCommitGraph +from persper.analytics.call_commit_graph import CallCommitGraph, CommitIdGenerators from persper.analytics.lsp_graph_server.ccls import CclsGraphServer from persper.util.path import root_path @@ -43,13 +43,14 @@ def createCclsGraphServer(): graphServer = CclsGraphServer(workspaceRoot, cacheRoot="./.ccls-cache", languageServerCommand=CCLS_COMMAND + (" -log-file=ccls.log" if DUMP_LOGS else ""), - dumpLogs=DUMP_LOGS) + dumpLogs=DUMP_LOGS, + graph=CallCommitGraph(commit_id_generator=CommitIdGenerators.fromComment)) graphServer.reset_graph() return graphServer @pytest.mark.asyncio -async def testFeatureBranch(): +async def testFeatureBranchFirstParent(): repoPath = prepareRepo("test_feature_branch") graphServer = createCclsGraphServer() analyzer = Analyzer(repoPath, graphServer, firstParentOnly=True) @@ -60,6 +61,18 @@ async def testFeatureBranch(): await analyzer.analyze() +@pytest.mark.asyncio +async def testFeatureBranch(): + repoPath = prepareRepo("test_feature_branch") + graphServer = createCclsGraphServer() + analyzer = Analyzer(repoPath, graphServer, firstParentOnly=False) + async with graphServer: + analyzer.observer = GraphDumpAnalyzerObserver( + None, + os.path.join(testDataRoot, "actualdump/feature_branch")) + await analyzer.analyze() + + @pytest.mark.asyncio async def testCppTestRepo(): repoPath = prepareRepo("cpp_test_repo") diff --git 
a/test/test_analytics/utility/graph_baseline.py b/test/test_analytics/utility/graph_baseline.py index 8c06ae2d68d..4cb5b7b8191 100644 --- a/test/test_analytics/utility/graph_baseline.py +++ b/test/test_analytics/utility/graph_baseline.py @@ -35,8 +35,7 @@ def fixGraphDict(graphData: dict): if "nodes" in graphData: for id, attr in graphData["nodes"].items(): if "history" in attr: - attr["history"] = dict((int(k), v) - for k, v in attr["history"].items()) + attr["history"] = dict(attr["history"].items()) return graphData From fe96a335e3e8eaccfd80ea641be625d0c26a1d4f Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 16 Feb 2019 22:29:26 +0800 Subject: [PATCH 33/84] Check in baseline for test cases. --- .../cpp_test_repo/A.g.json | 112 +++++++-------- .../cpp_test_repo/B.g.json | 116 +++++++-------- .../cpp_test_repo/C.g.json | 124 ++++++++-------- .../cpp_test_repo/D.g.json | 132 +++++++++--------- .../baseline/feature_branch/A.g.json | 4 +- .../baseline/feature_branch/B.g.json | 10 +- .../baseline/feature_branch/C.g.json | 20 +-- .../baseline/feature_branch/D.g.json | 31 ++-- .../baseline/feature_branch/E.g.json | 56 +++++--- .../baseline/feature_branch/F.g.json | 66 ++++++--- .../baseline/feature_branch/G.g.json | 66 +++++++++ .../baseline/feature_branch/H.g.json | 96 +++++++++++++ .../baseline/feature_branch/I.g.json | 99 +++++++++++++ .../baseline/feature_branch/J.g.json | 111 +++++++++++++++ .../baseline/feature_branch/K.g.json | 68 ++++++--- .../feature_branch_first_parent/A.g.json | 17 +++ .../feature_branch_first_parent/B.g.json | 30 ++++ .../feature_branch_first_parent/C.g.json | 47 +++++++ .../feature_branch_first_parent/D.g.json | 65 +++++++++ .../feature_branch_first_parent/E.g.json | 77 ++++++++++ .../feature_branch_first_parent/F.g.json | 89 ++++++++++++ .../feature_branch_first_parent/K.g.json | 90 ++++++++++++ 22 files changed, 1198 insertions(+), 328 deletions(-) rename test/test_analytics/{actualdump => baseline}/cpp_test_repo/A.g.json (84%) rename 
test/test_analytics/{actualdump => baseline}/cpp_test_repo/B.g.json (84%) rename test/test_analytics/{actualdump => baseline}/cpp_test_repo/C.g.json (83%) rename test/test_analytics/{actualdump => baseline}/cpp_test_repo/D.g.json (82%) create mode 100644 test/test_analytics/baseline/feature_branch/G.g.json create mode 100644 test/test_analytics/baseline/feature_branch/H.g.json create mode 100644 test/test_analytics/baseline/feature_branch/I.g.json create mode 100644 test/test_analytics/baseline/feature_branch/J.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/A.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/B.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/C.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/D.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/E.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/F.g.json create mode 100644 test/test_analytics/baseline/feature_branch_first_parent/K.g.json diff --git a/test/test_analytics/actualdump/cpp_test_repo/A.g.json b/test/test_analytics/baseline/cpp_test_repo/A.g.json similarity index 84% rename from test/test_analytics/actualdump/cpp_test_repo/A.g.json rename to test/test_analytics/baseline/cpp_test_repo/A.g.json index a44e165a511..2bd1b9aacb1 100644 --- a/test/test_analytics/actualdump/cpp_test_repo/A.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/A.g.json @@ -1,312 +1,312 @@ { "edges": { "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "bool Equal(const int &lhs, const int &rhs, int 
comparision)|->|enum class StringComparison : int {}": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { - "addedBy": 0, + "addedBy": "A", "weight": null } }, "nodes": { "ANSI_COLOR_BLUE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_BRIGHT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_CYAN": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_GREEN": { "history": { - "0": 1 + "A": 1 }, "size": null }, 
"ANSI_COLOR_MAGENTA": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RED": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RESET": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_YELLOW": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TDest safe_cast(TSrc obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM_DEFAULT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RE_TRACE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const int range)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Confirm(const int &prompt)": { "history": { - "0": 17 + "A": 17 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)": { "history": { - "0": 29 + "A": 29 }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool pointer_kind_of(const int obj)": { 
"history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf()": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "enum class StringComparison : int {}": { "history": { - "0": 6 + "A": 6 }, "size": null }, "inline int to_string(const int &value)": { "history": { - "0": 4 + "A": 4 }, "size": null }, "int &operator>>(int &reader, bool &rhs)": { "history": { - "0": 24 + "A": 24 }, "size": null }, "int &operator>>(int &reader, double &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, float &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, int &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, long &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int StreamStatusToString(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "int main(int argc, char *argv[])": { "history": { - "0": 7 + "A": 7 }, "size": null }, "int safe_pointer_cast(const int &obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "void ReportException(const int &ex, int level)": { "history": { - "0": 21 + "A": 21 }, "size": null }, "void ValidateStream(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null } diff --git a/test/test_analytics/actualdump/cpp_test_repo/B.g.json b/test/test_analytics/baseline/cpp_test_repo/B.g.json similarity index 84% rename from test/test_analytics/actualdump/cpp_test_repo/B.g.json rename to test/test_analytics/baseline/cpp_test_repo/B.g.json index 7e0f202ba82..2d4d789f1b3 100644 --- a/test/test_analytics/actualdump/cpp_test_repo/B.g.json +++ 
b/test/test_analytics/baseline/cpp_test_repo/B.g.json @@ -1,317 +1,317 @@ { "edges": { "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { - "addedBy": 1, + "addedBy": "B", "weight": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 0, + "addedBy": "A", "weight": null }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { - "addedBy": 
1, + "addedBy": "B", "weight": null }, "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { - "addedBy": 1, + "addedBy": "B", "weight": null }, "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { - "addedBy": 1, + "addedBy": "B", "weight": null } }, "nodes": { "ANSI_COLOR_BLUE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_BRIGHT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_CYAN": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_GREEN": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_MAGENTA": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RED": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RESET": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_YELLOW": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TDest safe_cast(TSrc obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM_DEFAULT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RE_TRACE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const 
int range)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Confirm(const int &prompt)": { "history": { - "0": 17 + "A": 17 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)": { "history": { - "0": 29 + "A": 29 }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool pointer_kind_of(const int obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf()": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "enum class StringComparison : int {}": { "history": { - "0": 6 + "A": 6 }, "size": null }, "inline int to_string(const int &value)": { "history": { - "0": 4 + "A": 4 }, "size": null }, "int &operator>>(int &reader, bool &rhs)": { "history": { - "0": 24 + "A": 24 }, "size": null }, "int &operator>>(int &reader, double &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, float &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, int &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, long &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int StreamStatusToString(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "int main(int argc, char *argv[])": { "history": { - "0": 7, - "1": 4 + "A": 7, + "B": 4 }, "size": null }, "int 
safe_pointer_cast(const int &obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "void ReportException(const int &ex, int level)": { "history": { - "0": 21 + "A": 21 }, "size": null }, "void ValidateStream(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null } diff --git a/test/test_analytics/actualdump/cpp_test_repo/C.g.json b/test/test_analytics/baseline/cpp_test_repo/C.g.json similarity index 83% rename from test/test_analytics/actualdump/cpp_test_repo/C.g.json rename to test/test_analytics/baseline/cpp_test_repo/C.g.json index 73cb945c458..4d25d6e1f11 100644 --- a/test/test_analytics/actualdump/cpp_test_repo/C.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/C.g.json @@ -1,321 +1,321 @@ { "edges": { "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { - "addedBy": 2, + "addedBy": "C", "weight": 
null }, "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { - "addedBy": 1, + "addedBy": "B", "weight": null }, "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { - "addedBy": 2, + "addedBy": "C", "weight": null } }, "nodes": { "ANSI_COLOR_BLUE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_BRIGHT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_CYAN": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_GREEN": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_MAGENTA": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RED": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RESET": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_YELLOW": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "0": 1 + "A": 1 }, 
"size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TDest safe_cast(TSrc obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM_DEFAULT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RE_TRACE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const int range)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Confirm(const int &prompt)": { "history": { - "0": 17 + "A": 17 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)": { "history": { - "0": 29, - "2": 2 + "A": 29, + "C": 2 }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool pointer_kind_of(const int obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf()": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "enum class StringComparison : int {}": { "history": { - "0": 6, - "2": 2 + "A": 6, + "C": 2 }, "size": null }, "inline int to_string(const int &value)": { "history": { - "0": 4 + "A": 4 }, "size": null }, 
"int &operator>>(int &reader, bool &rhs)": { "history": { - "0": 24, - "2": 4 + "A": 24, + "C": 4 }, "size": null }, "int &operator>>(int &reader, double &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, float &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, int &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, long &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int StreamStatusToString(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "int main(int argc, char *argv[])": { "history": { - "0": 7, - "1": 4, - "2": 4 + "A": 7, + "B": 4, + "C": 4 }, "size": null }, "int safe_pointer_cast(const int &obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "void ReportException(const int &ex, int level)": { "history": { - "0": 21 + "A": 21 }, "size": null }, "void ValidateStream(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null } diff --git a/test/test_analytics/actualdump/cpp_test_repo/D.g.json b/test/test_analytics/baseline/cpp_test_repo/D.g.json similarity index 82% rename from test/test_analytics/actualdump/cpp_test_repo/D.g.json rename to test/test_analytics/baseline/cpp_test_repo/D.g.json index 32fc9162859..74f9d08d417 100644 --- a/test/test_analytics/actualdump/cpp_test_repo/D.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/D.g.json @@ -1,325 +1,325 @@ { "edges": { "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { - "addedBy": 3, + "addedBy": "D", "weight": null 
}, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { - "addedBy": 3, + "addedBy": "D", "weight": null }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { - "addedBy": 1, + "addedBy": "B", "weight": null }, "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { - "addedBy": 3, + "addedBy": "D", "weight": null } }, "nodes": { "ANSI_COLOR_BLUE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_BRIGHT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_CYAN": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_GREEN": { "history": { - "0": 
1 + "A": 1 }, "size": null }, "ANSI_COLOR_MAGENTA": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RED": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_RESET": { "history": { - "0": 1 + "A": 1 }, "size": null }, "ANSI_COLOR_YELLOW": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TDest safe_cast(TSrc obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_DECLARE_ENUM_DEFAULT": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RE_TRACE": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const int range)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Confirm(const int &prompt)": { "history": { - "0": 17 + "A": 17 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "bool Equal(const int &lhs, const int &rhs, int comparision)": { "history": { - "0": 29, - "2": 2, - "3": 2 + "A": 29, + "C": 2, + "D": 2 }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { "history": { - "0": 
1 + "A": 1 }, "size": null }, "bool pointer_kind_of(const int obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf()": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "enum class StringComparison : int {}": { "history": { - "0": 6, - "2": 2, - "3": 2 + "A": 6, + "C": 2, + "D": 2 }, "size": null }, "inline int to_string(const int &value)": { "history": { - "0": 4 + "A": 4 }, "size": null }, "int &operator>>(int &reader, bool &rhs)": { "history": { - "0": 24, - "2": 4, - "3": 4 + "A": 24, + "C": 4, + "D": 4 }, "size": null }, "int &operator>>(int &reader, double &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, float &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, int &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int &operator>>(int &reader, long &rhs)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "int StreamStatusToString(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "int main(int argc, char *argv[])": { "history": { - "0": 7, - "1": 4, - "2": 4, - "3": 7 + "A": 7, + "B": 4, + "C": 4, + "D": 7 }, "size": null }, "int safe_pointer_cast(const int &obj)": { "history": { - "0": 1 + "A": 1 }, "size": null }, "void ReportException(const int &ex, int level)": { "history": { - "0": 21 + "A": 21 }, "size": null }, "void ValidateStream(const TStream &stream)": { "history": { - "0": 1 + "A": 1 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/A.g.json b/test/test_analytics/baseline/feature_branch/A.g.json index 237d19032ed..7833183bead 100644 --- 
a/test/test_analytics/baseline/feature_branch/A.g.json +++ b/test/test_analytics/baseline/feature_branch/A.g.json @@ -3,13 +3,13 @@ "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7 + "A": 7 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/B.g.json b/test/test_analytics/baseline/feature_branch/B.g.json index 3d23e7c2929..d6849e2667e 100644 --- a/test/test_analytics/baseline/feature_branch/B.g.json +++ b/test/test_analytics/baseline/feature_branch/B.g.json @@ -3,26 +3,26 @@ "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7, - "1": 3 + "A": 7, + "B": 3 }, "size": null }, "char *str_append_chr(char *string, char append)": { "history": { - "1": 3 + "B": 3 }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { "history": { - "1": 11 + "B": 11 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/C.g.json b/test/test_analytics/baseline/feature_branch/C.g.json index 919e12d7ef3..73c976c5550 100644 --- a/test/test_analytics/baseline/feature_branch/C.g.json +++ b/test/test_analytics/baseline/feature_branch/C.g.json @@ -1,45 +1,45 @@ { "edges": { "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": 2, + "addedBy": "C", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": 2, + "addedBy": "C", "weight": null } }, "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7, - "1": 3 + "A": 7, + "B": 3 }, "size": null }, "char *str_append_chr(char *string, char append)": { "history": { - "1": 3, - "2": 34 + "B": 3, + "C": 34 }, "size": null }, "int str_equals()": { "history": { - "2": 1 + "C": 1 }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { "history": { - "1": 
11, - "2": 1 + "B": 11, + "C": 1 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/D.g.json b/test/test_analytics/baseline/feature_branch/D.g.json index 036411b985d..dc709adda76 100644 --- a/test/test_analytics/baseline/feature_branch/D.g.json +++ b/test/test_analytics/baseline/feature_branch/D.g.json @@ -1,63 +1,64 @@ { "edges": { "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": 2, + "addedBy": "D", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": 2, + "addedBy": "D", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { - "addedBy": 3, + "addedBy": "G", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { - "addedBy": 3, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { - "addedBy": 3, + "addedBy": "G", "weight": null } }, "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7, - "1": 3 + "A": 7, + "B": 3 }, "size": null }, "char *str_append_chr(char *string, char append)": { "history": { - "1": 3, - "2": 34 + "B": 3, + "C": 34 }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { "history": { - "3": 26 + "G": 26 }, "size": null }, "int str_equals()": { "history": { - "2": 1 + "C": 1 }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { "history": { - "1": 11, - "2": 1 + "B": 11, + "C": 1, + "G": 2 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/E.g.json 
b/test/test_analytics/baseline/feature_branch/E.g.json index a92bf26d34a..421199f1fb2 100644 --- a/test/test_analytics/baseline/feature_branch/E.g.json +++ b/test/test_analytics/baseline/feature_branch/E.g.json @@ -1,75 +1,97 @@ { "edges": { "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": 2, + "addedBy": "D", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": 2, + "addedBy": "D", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { - "addedBy": 3, + "addedBy": "G", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { - "addedBy": 3, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { - "addedBy": 3, + "addedBy": "G", + "weight": null + }, + "void add(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void add(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void append(int num)": { + "addedBy": "H", "weight": null } }, "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7, - "1": 3 + "A": 7, + "B": 3 }, "size": null }, "char *str_append_chr(char *string, char append)": { "history": { - "1": 3, - "2": 34 + "B": 3, + "C": 34 }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { "history": { - "3": 26 + "G": 26 }, "size": null }, "int str_equals()": { "history": { - "2": 1 + "C": 1 }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { "history": { - "1": 11, - "2": 1 + "B": 11, + "C": 1, + "G": 2 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": 
null }, "void add(int num)": { "history": { - "4": 11 + "H": 16, + "I": 5 }, "size": null }, "void append(int num)": { "history": { - "4": 29 + "H": 12, + "I": 35 + }, + "size": null + }, + "void insert(int num)": { + "history": { + "H": 25, + "I": 25 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/F.g.json b/test/test_analytics/baseline/feature_branch/F.g.json index 1cf97a4b913..a8ec1d2d5f5 100644 --- a/test/test_analytics/baseline/feature_branch/F.g.json +++ b/test/test_analytics/baseline/feature_branch/F.g.json @@ -1,87 +1,117 @@ { "edges": { + "char *str_append_chr(char *string, char append)|->|int count()": { + "addedBy": "F", + "weight": null + }, "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": 2, + "addedBy": "F", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": 2, + "addedBy": "F", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { - "addedBy": 3, + "addedBy": "F", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { - "addedBy": 3, + "addedBy": "F", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { - "addedBy": 3, + "addedBy": "F", + "weight": null + }, + "void add(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void add(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void append(int num)": { + "addedBy": "H", "weight": null } }, "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7, - "1": 3 + "A": 7, + "B": 3 }, "size": null }, "char *str_append_chr(char *string, 
char append)": { "history": { - "1": 3, - "2": 34 + "B": 3, + "C": 34 }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { "history": { - "3": 26 + "G": 26 }, "size": null }, "int count()": { "history": { - "5": 12 + "J": 12 }, "size": null }, "int str_equals()": { "history": { - "2": 1 + "C": 1 }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { "history": { - "1": 11, - "2": 1 + "B": 11, + "C": 1, + "G": 2 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "void add(int num)": { "history": { - "4": 11 + "H": 16, + "I": 5 }, "size": null }, "void append(int num)": { "history": { - "4": 29 + "H": 12, + "I": 35 }, "size": null }, "void display(struct node *r)": { "history": { - "5": 14 + "J": 14 + }, + "size": null + }, + "void insert(int num)": { + "history": { + "H": 25, + "I": 25 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/G.g.json b/test/test_analytics/baseline/feature_branch/G.g.json new file mode 100644 index 00000000000..c7f0ffddad3 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/G.g.json @@ -0,0 +1,66 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "C", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "C", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "G", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": 
null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "G": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1, + "G": 2 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/H.g.json b/test/test_analytics/baseline/feature_branch/H.g.json new file mode 100644 index 00000000000..0fdd9219bd8 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/H.g.json @@ -0,0 +1,96 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "G", + "weight": null + }, + "void add(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void add(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 
3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "G": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1, + "G": 2 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "H": 16 + }, + "size": null + }, + "void append(int num)": { + "history": { + "H": 12 + }, + "size": null + }, + "void insert(int num)": { + "history": { + "H": 25 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/I.g.json b/test/test_analytics/baseline/feature_branch/I.g.json new file mode 100644 index 00000000000..421199f1fb2 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/I.g.json @@ -0,0 +1,99 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "G", + "weight": null + }, + "void add(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void add(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char 
*append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "G": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1, + "G": 2 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "H": 16, + "I": 5 + }, + "size": null + }, + "void append(int num)": { + "history": { + "H": 12, + "I": 35 + }, + "size": null + }, + "void insert(int num)": { + "history": { + "H": 25, + "I": 25 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/J.g.json b/test/test_analytics/baseline/feature_branch/J.g.json new file mode 100644 index 00000000000..9937b4ebebf --- /dev/null +++ b/test/test_analytics/baseline/feature_branch/J.g.json @@ -0,0 +1,111 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "G", + "weight": null + }, + "void add(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void add(int num)": { + 
"addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "G": 26 + }, + "size": null + }, + "int count()": { + "history": { + "J": 12 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1, + "G": 2 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "H": 16, + "I": 5 + }, + "size": null + }, + "void append(int num)": { + "history": { + "H": 12, + "I": 35 + }, + "size": null + }, + "void display(struct node *r)": { + "history": { + "J": 14 + }, + "size": null + }, + "void insert(int num)": { + "history": { + "H": 25, + "I": 25 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch/K.g.json b/test/test_analytics/baseline/feature_branch/K.g.json index b0061dbff9f..b3718df38be 100644 --- a/test/test_analytics/baseline/feature_branch/K.g.json +++ b/test/test_analytics/baseline/feature_branch/K.g.json @@ -1,88 +1,118 @@ { "edges": { + "char *str_append_chr(char *string, char append)|->|int count()": { + "addedBy": "F", + "weight": null + }, "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": 2, + "addedBy": "F", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": 2, + "addedBy": "F", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, 
char append)": { - "addedBy": 3, + "addedBy": "F", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { - "addedBy": 3, + "addedBy": "F", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "G", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { - "addedBy": 3, + "addedBy": "F", + "weight": null + }, + "void add(int num)|->|void append(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void add(int num)": { + "addedBy": "H", + "weight": null + }, + "void insert(int num)|->|void append(int num)": { + "addedBy": "H", "weight": null } }, "nodes": { "char *str_append(char *string, char *append)": { "history": { - "0": 7, - "1": 3 + "A": 7, + "B": 3 }, "size": null }, "char *str_append_chr(char *string, char append)": { "history": { - "1": 3, - "2": 34 + "B": 3, + "C": 34 }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { "history": { - "3": 26 + "G": 26 }, "size": null }, "int count()": { "history": { - "5": 12 + "J": 12 }, "size": null }, "int str_equals()": { "history": { - "2": 1 + "C": 1 }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { "history": { - "1": 11, - "2": 1 + "B": 11, + "C": 1, + "G": 2 }, "size": null }, "int str_len(char *string)": { "history": { - "0": 6 + "A": 6 }, "size": null }, "void add(int num)": { "history": { - "4": 11 + "H": 16, + "I": 5 }, "size": null }, "void append(int num)": { "history": { - "4": 29 + "H": 12, + "I": 35 }, "size": null }, "void display(struct node *r)": { "history": { - "5": 14, - "6": 5 + "J": 14, + "K": 5 + }, + "size": null + }, + "void insert(int num)": { + "history": { + "H": 25, + "I": 25 }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/A.g.json 
b/test/test_analytics/baseline/feature_branch_first_parent/A.g.json new file mode 100644 index 00000000000..7833183bead --- /dev/null +++ b/test/test_analytics/baseline/feature_branch_first_parent/A.g.json @@ -0,0 +1,17 @@ +{ + "edges": {}, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch_first_parent/B.g.json b/test/test_analytics/baseline/feature_branch_first_parent/B.g.json new file mode 100644 index 00000000000..d6849e2667e --- /dev/null +++ b/test/test_analytics/baseline/feature_branch_first_parent/B.g.json @@ -0,0 +1,30 @@ +{ + "edges": {}, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json new file mode 100644 index 00000000000..73c976c5550 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json @@ -0,0 +1,47 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "C", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "C", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, 
+ "C": 34 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json new file mode 100644 index 00000000000..8fa297dd82e --- /dev/null +++ b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json @@ -0,0 +1,65 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "C", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "C", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "D": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + } + } +} \ No newline at end of file diff --git 
a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json new file mode 100644 index 00000000000..5a43da6ea80 --- /dev/null +++ b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json @@ -0,0 +1,77 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "C", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "C", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "D": 26 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "E": 11 + }, + "size": null + }, + "void append(int num)": { + "history": { + "E": 29 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json new file mode 100644 index 00000000000..547dea94605 --- /dev/null 
+++ b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json @@ -0,0 +1,89 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "C", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "C", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "D": 26 + }, + "size": null + }, + "int count()": { + "history": { + "F": 12 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "E": 11 + }, + "size": null + }, + "void append(int num)": { + "history": { + "E": 29 + }, + "size": null + }, + "void display(struct node *r)": { + "history": { + "F": 14 + }, + "size": null + } + } +} \ No newline at end of file diff --git a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json new file mode 100644 index 00000000000..a4c6c6aebbb --- /dev/null +++ 
b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json @@ -0,0 +1,90 @@ +{ + "edges": { + "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "addedBy": "C", + "weight": null + }, + "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { + "addedBy": "C", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "addedBy": "D", + "weight": null + }, + "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { + "addedBy": "D", + "weight": null + } + }, + "nodes": { + "char *str_append(char *string, char *append)": { + "history": { + "A": 7, + "B": 3 + }, + "size": null + }, + "char *str_append_chr(char *string, char append)": { + "history": { + "B": 3, + "C": 34 + }, + "size": null + }, + "char *str_replace(char *search, char *replace, char *subject)": { + "history": { + "D": 26 + }, + "size": null + }, + "int count()": { + "history": { + "F": 12 + }, + "size": null + }, + "int str_equals()": { + "history": { + "C": 1 + }, + "size": null + }, + "int str_equals(char *equal1, char *eqaul2)": { + "history": { + "B": 11, + "C": 1 + }, + "size": null + }, + "int str_len(char *string)": { + "history": { + "A": 6 + }, + "size": null + }, + "void add(int num)": { + "history": { + "E": 11 + }, + "size": null + }, + "void append(int num)": { + "history": { + "E": 29 + }, + "size": null + }, + "void display(struct node *r)": { + "history": { + "F": 14, + "K": 5 + }, + "size": null + } + } +} \ No newline at end of file From 4ab80c1e90d0cb51b97b0373e934c7b9316444b9 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 16 Feb 2019 22:29:56 +0800 Subject: [PATCH 34/84] Set test/test_analytics/actualdump in gitignore. Minor fix. 
--- .gitignore | 2 ++ test/test_analytics/test_analyzer_lsp_ccls.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 2f2b3d8d9b4..5360182e6c1 100644 --- a/.gitignore +++ b/.gitignore @@ -93,3 +93,5 @@ ENV/ .ccls-cache /bin /bin-* +.pytest_cache +test/test_analytics/actualdump diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index f9e7fb9c892..4bd6f79dce8 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -68,7 +68,7 @@ async def testFeatureBranch(): analyzer = Analyzer(repoPath, graphServer, firstParentOnly=False) async with graphServer: analyzer.observer = GraphDumpAnalyzerObserver( - None, + os.path.join(testDataRoot, "baseline/feature_branch"), os.path.join(testDataRoot, "actualdump/feature_branch")) await analyzer.analyze() From 6bbbbdc5e3b95f441a7bb018d7a529a566fc6c96 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 16 Feb 2019 23:17:45 +0800 Subject: [PATCH 35/84] Make analyzer2 pickle-able. Update notebook. 
--- notebooks/lsp-ccls.ipynb | 384 +++++++----------- persper/analytics/analyzer2.py | 50 ++- .../analytics/lsp_graph_server/__init__.py | 4 - persper/analytics/lsp_graph_server/ccls.py | 3 +- test/test_analytics/test_analyzer_lsp_ccls.py | 6 + 5 files changed, 201 insertions(+), 246 deletions(-) diff --git a/notebooks/lsp-ccls.ipynb b/notebooks/lsp-ccls.ipynb index 79c47c74f5b..2108a172c60 100644 --- a/notebooks/lsp-ccls.ipynb +++ b/notebooks/lsp-ccls.ipynb @@ -18,154 +18,148 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "scrolled": false + "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\n" + "Workspace root: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2019-01-17 23:45:01,005 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Exceptions.h.\n" + "2019-02-16 23:15:41,552 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Exceptions.h.\n", + "2019-02-16 23:15:41,608 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.cpp.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "----- Overview ------\n", - "# of commits on master: 4\n", - "# of commits on branch: 0\n", - "----- No.1 4965d8edcb63cab6e544c1ecd19454f37d9bb0d3 on main -----\n" + "Commit 4965d8edcb63cab6e544c1ecd19454f37d9bb0d3 (A): Going forward (initial commit).\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2019-01-17 23:45:01,062 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp.\n", - "2019-01-17 23:45:01,118 INFO [persper.analytics.lsp_graph_server.callgraph.builder] 
Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.h.\n", - "2019-01-17 23:45:01,174 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TypeTraits.h.\n", - "2019-01-17 23:45:01,230 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp.\n", - "2019-01-17 23:45:01,284 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h.\n", - "2019-01-17 23:45:01,339 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", - "2019-01-17 23:45:01,396 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.cpp.\n", - "2019-01-17 23:45:01,452 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.h.\n", - "2019-01-17 23:45:01,455 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", - "2019-01-17 23:45:01,457 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h\n", - "2019-01-17 23:45:03,434 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 73 branches.\n", - "2019-01-17 23:45:03,435 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Exceptions.h\n", - "2019-01-17 23:45:03,696 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", - "2019-01-17 23:45:03,697 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TypeTraits.h\n", - "2019-01-17 23:45:03,770 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Yielded 1 branches.\n", - "2019-01-17 23:45:03,771 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", - "2019-01-17 23:45:05,443 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", - "2019-01-17 23:45:05,444 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.h\n", - "2019-01-17 23:45:07,225 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 40 branches.\n", - "2019-01-17 23:45:07,226 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", - "2019-01-17 23:45:08,342 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2019-01-17 23:45:08,343 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.h\n", - "2019-01-17 23:45:09,076 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", - "2019-01-17 23:45:09,077 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp\n", - "2019-01-17 23:45:10,579 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 163 branches.\n", - "2019-01-17 23:45:10,580 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\stdafx.cpp\n", - "2019-01-17 23:45:11,346 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", - "2019-01-17 23:45:11,347 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 387 branches from 9 files.\n", - "2019-01-17 23:45:11,348 INFO [persper.analytics.lsp_graph_server] 
End commit: 4965d8edcb63cab6e544c1ecd19454f37d9bb0d3\n", - "2019-01-17 23:45:11,511 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility-1.cpp.\n" + "2019-02-16 23:15:41,663 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.h.\n", + "2019-02-16 23:15:41,720 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TypeTraits.h.\n", + "2019-02-16 23:15:41,776 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp.\n", + "2019-02-16 23:15:41,830 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.h.\n", + "2019-02-16 23:15:41,887 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp.\n", + "2019-02-16 23:15:41,941 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\stdafx.cpp.\n", + "2019-02-16 23:15:41,995 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\stdafx.h.\n", + "2019-02-16 23:15:41,998 INFO [persper.analytics.lsp_graph_server] Invalidated 9 files, affected 9 files.\n", + "2019-02-16 23:15:42,871 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.cpp\n", + "2019-02-16 23:15:43,083 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 86 branches.\n", + "2019-02-16 23:15:43,084 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.h\n", + "2019-02-16 23:15:43,339 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Yielded 66 branches.\n", + "2019-02-16 23:15:43,340 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\stdafx.cpp\n", + "2019-02-16 23:15:43,393 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 0 branches.\n", + "2019-02-16 23:15:43,394 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Exceptions.h\n", + "2019-02-16 23:15:43,497 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 18 branches.\n", + "2019-02-16 23:15:43,498 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp\n", + "2019-02-16 23:15:43,559 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 4 branches.\n", + "2019-02-16 23:15:43,561 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.h\n", + "2019-02-16 23:15:43,715 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 39 branches.\n", + "2019-02-16 23:15:43,716 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TypeTraits.h\n", + "2019-02-16 23:15:43,773 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 1 branches.\n", + "2019-02-16 23:15:43,774 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\stdafx.h\n", + "2019-02-16 23:15:43,830 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 1 branches.\n", + "2019-02-16 23:15:43,832 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp\n", + "2019-02-16 
23:15:43,950 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", + "2019-02-16 23:15:43,951 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 242 branches from 9 files.\n", + "2019-02-16 23:15:44,115 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility-1.cpp.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "----- No.2 12a65d92071e8ab32890bc0f69697b2efcb013f8 on main -----\n" + "Commit 12a65d92071e8ab32890bc0f69697b2efcb013f8 (B): Going forward.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2019-01-17 23:45:11,620 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", - "2019-01-17 23:45:11,622 INFO [persper.analytics.lsp_graph_server] Invalidated 3 files, affected 3 files.\n", - "2019-01-17 23:45:11,623 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", - "2019-01-17 23:45:11,625 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility-1.cpp\n", - "2019-01-17 23:45:12,790 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2019-01-17 23:45:12,791 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", - "2019-01-17 23:45:13,805 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 12 branches.\n", - "2019-01-17 23:45:13,807 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 89 branches from 2 files.\n", - "2019-01-17 23:45:13,807 INFO [persper.analytics.lsp_graph_server] End commit: 12a65d92071e8ab32890bc0f69697b2efcb013f8\n", - "2019-01-17 23:45:13,984 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp.\n" + "2019-02-16 23:15:44,224 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp.\n", + "2019-02-16 23:15:44,226 INFO [persper.analytics.lsp_graph_server] Invalidated 3 files, affected 3 files.\n", + "2019-02-16 23:15:44,227 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp\n", + "2019-02-16 23:15:44,358 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp\n", + "2019-02-16 23:15:44,423 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 5 branches.\n", + "2019-02-16 23:15:44,424 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility-1.cpp\n", + "2019-02-16 23:15:44,546 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", + "2019-02-16 23:15:44,547 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 32 branches from 2 files.\n", + "2019-02-16 23:15:45,728 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.cpp.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "----- No.3 4928d2ec0ad82221b61b30f3ae2e1cc4c61a3ea0 on main -----\n" + "Commit 4928d2ec0ad82221b61b30f3ae2e1cc4c61a3ea0 (C): Going forward.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2019-01-17 23:45:14,090 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp.\n", - "2019-01-17 23:45:14,196 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified 
C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h.\n", - "2019-01-17 23:45:14,300 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", - "2019-01-17 23:45:14,302 INFO [persper.analytics.lsp_graph_server] Invalidated 5 files, affected 5 files.\n", - "2019-01-17 23:45:14,303 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility-1.cpp\n", - "2019-01-17 23:45:14,304 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h\n", - "2019-01-17 23:45:15,886 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 78 branches.\n", - "2019-01-17 23:45:15,887 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", - "2019-01-17 23:45:16,872 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2019-01-17 23:45:16,874 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp\n", - "2019-01-17 23:45:18,407 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 163 branches.\n", - "2019-01-17 23:45:18,409 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", - "2019-01-17 23:45:19,358 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 6 branches.\n", - "2019-01-17 23:45:19,359 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 324 branches from 4 files.\n", - "2019-01-17 23:45:19,359 INFO [persper.analytics.lsp_graph_server] End commit: 4928d2ec0ad82221b61b30f3ae2e1cc4c61a3ea0\n", - "2019-01-17 23:45:19,538 INFO [persper.analytics.lsp_graph_server.callgraph.builder] 
Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp.\n" + "2019-02-16 23:15:45,853 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp.\n", + "2019-02-16 23:15:45,968 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.h.\n", + "2019-02-16 23:15:46,083 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp.\n", + "2019-02-16 23:15:46,085 INFO [persper.analytics.lsp_graph_server] Invalidated 5 files, affected 5 files.\n", + "2019-02-16 23:15:46,086 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Confirm deleted: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility-1.cpp\n", + "2019-02-16 23:15:46,489 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.cpp\n", + "2019-02-16 23:15:46,710 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 86 branches.\n", + "2019-02-16 23:15:46,711 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.h\n", + "2019-02-16 23:15:46,982 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 66 branches.\n", + "2019-02-16 23:15:46,984 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp\n", + "2019-02-16 23:15:47,046 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 4 branches.\n", + "2019-02-16 23:15:47,047 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp\n", + "2019-02-16 23:15:47,170 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 
branches.\n", + "2019-02-16 23:15:47,171 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 183 branches from 4 files.\n", + "2019-02-16 23:15:47,338 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.cpp.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "----- No.4 639934cdc7499854e22df79835240ac786498300 on main -----\n" + "Commit 639934cdc7499854e22df79835240ac786498300 (D): Going forward.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "2019-01-17 23:45:19,647 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp.\n", - "2019-01-17 23:45:19,758 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h.\n", - "2019-01-17 23:45:19,869 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp.\n", - "2019-01-17 23:45:19,871 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 4 files.\n", - "2019-01-17 23:45:19,874 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\TextFileParsers.cpp\n", - "2019-01-17 23:45:22,915 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 163 branches.\n", - "2019-01-17 23:45:22,916 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.h\n", - "2019-01-17 23:45:24,290 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 87 branches.\n", - "2019-01-17 23:45:24,291 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\Utility.cpp\n", - "2019-01-17 23:45:24,503 INFO 
[persper.analytics.lsp_graph_server.callgraph.builder] Yielded 77 branches.\n", - "2019-01-17 23:45:24,505 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmpfdobutn9\\main.cpp\n", - "2019-01-17 23:45:25,399 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 3 branches.\n", - "2019-01-17 23:45:25,400 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 330 branches from 4 files.\n", - "2019-01-17 23:45:25,401 INFO [persper.analytics.lsp_graph_server] End commit: 639934cdc7499854e22df79835240ac786498300\n", - "2019-01-17 23:45:25,402 INFO [persper.analytics.lsp_graph_server] Shutting down language server...\n", - "2019-01-17 23:45:35,405 WARNING [persper.analytics.lsp_graph_server] Killed language server 7904.\n" + "2019-02-16 23:15:47,452 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp.\n", + "2019-02-16 23:15:47,559 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.h.\n", + "2019-02-16 23:15:47,665 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Modified C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp.\n", + "2019-02-16 23:15:47,667 INFO [persper.analytics.lsp_graph_server] Invalidated 4 files, affected 4 files.\n", + "2019-02-16 23:15:48,070 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\TextFileParsers.cpp\n", + "2019-02-16 23:15:48,284 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 86 branches.\n", + "2019-02-16 23:15:48,285 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\main.cpp\n", + "2019-02-16 23:15:48,345 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 2 branches.\n", 
+ "2019-02-16 23:15:48,346 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.h\n", + "2019-02-16 23:15:48,606 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 66 branches.\n", + "2019-02-16 23:15:48,608 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Build call graph in: C:\\Users\\CXy\\AppData\\Local\\Temp\\tmp8mocg_mh\\Utility.cpp\n", + "2019-02-16 23:15:48,726 INFO [persper.analytics.lsp_graph_server.callgraph.builder] Yielded 27 branches.\n", + "2019-02-16 23:15:48,727 INFO [persper.analytics.lsp_graph_server.callgraph.manager] Added 181 branches from 4 files.\n", + "2019-02-16 23:15:48,728 INFO [persper.analytics.lsp_graph_server] Shutting down language server...\n", + "2019-02-16 23:15:58,730 WARNING [persper.analytics.lsp_graph_server] Killed language server 24144.\n" ] } ], "source": [ - "from persper.analytics.analyzer import Analyzer\n", + "from persper.analytics.analyzer2 import Analyzer\n", "from persper.analytics.lsp_graph_server.ccls import CclsGraphServer\n", + "from persper.analytics.call_commit_graph import CallCommitGraph, CommitIdGenerators\n", "from tempfile import mkdtemp\n", "\n", "dumpLogs = True\n", @@ -174,11 +168,12 @@ "G = None\n", "async with CclsGraphServer(workspaceRoot, cacheRoot=\"./.ccls-cache\",\n", " languageServerCommand=\"../bin/ccls\" + (\" -log-file=ccls.log\" if dumpLogs else \"\"),\n", - " dumpLogs=dumpLogs) as graphServer:\n", + " dumpLogs=dumpLogs,\n", + " graph=CallCommitGraph(commit_id_generator=CommitIdGenerators.fromComment)) as graphServer:\n", " analyzer = Analyzer(\"../repos/cpp_test_repo\", graphServer)\n", " graphServer.reset_graph()\n", " await analyzer.analyze()\n", - " G = analyzer.get_graph()" + " G = analyzer.graph" ] }, { @@ -190,8 +185,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Nodes: 56\n", - "Edges: 58\n" + "Nodes: 42\n", + "Edges: 15\n" ] } ], @@ -211,64 +206,21 @@ 
"name": "stdout", "output_type": "stream", "text": [ - "_RangeToEnumerable RangeToEnumerable(const std::pair range) | class _RangeToEnumerable {}\n", - "template<> struct is_flags : std::true_type {} | enum class StringComparison : int {}\n", - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "int &operator>>(int &reader, long &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, float &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, double &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, bool &rhs) | int &operator>>(int &reader, int &rhs)\n", + "int &operator>>(int &reader, bool &rhs) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", + "int &operator>>(int &reader, bool &rhs) | enum class StringComparison : int {}\n", + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision) | bool Equal(const int &lhs, const int &rhs, int comparision)\n", + "bool Equal(const int &lhs, const int &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", - "const char *FriendlyNameOf(const std::type_index &type) | namespace std {}\n", "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", - "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", - "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", - "class OperationFailureException : public Exception {} | 
class Exception : public std::exception {}\n", + "void ReportException(const int &ex, int level) | void ReportException(const int &ex, int level)\n", "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", - "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", - "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", - "int main(int argc, char *argv[]) | double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const\n", - "int main(int argc, char *argv[]) | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", - "class ConfigurationParser {} | namespace std {}\n", - "RowReader &operator>>(RowReader &reader, std::string &rhs) | class RowReader {}\n", - "TStream &operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, int &rhs) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, long &rhs) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, float &rhs) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, double &rhs) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, double &rhs) | 
RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | enum class StringComparison : int {}\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", - "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", - "void ConfigurationParser::Load(std::istream &inputStream) | class ConfigurationParser {}\n", - "void ConfigurationParser::Load(std::istream &inputStream) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "void ConfigurationParser::Load(std::istream &inputStream) | class Exception : public std::exception {}\n", - "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const | class ConfigurationParser {}\n", - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class ConfigurationParser {}\n", - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class Exception : public std::exception {}\n", - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class ConfigurationParser {}\n", - 
"double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class Exception : public std::exception {}\n", - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class ConfigurationParser {}\n", - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | enum class StringComparison : int {}\n", - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class Exception : public std::exception {}\n", - "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | class ConfigurationParser {}\n", - "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | void ConfigurationParser::Load(std::istream &inputStream)\n", - "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | class ConfigurationParser {}\n", - "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | TStream OpenAndValidate(const TPath arg1)\n", - "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | void ConfigurationParser::Load(std::istream &inputStream)\n" + "int main(int argc, char *argv[]) | void ReportException(const int &ex, int level)\n", + "bool Equal(const int &lhs, const int &rhs, int comparision) | enum class StringComparison : int {}\n" ] } ], @@ -300,64 +252,38 @@ "name": "stdout", "output_type": "stream", "text": [ - 
"_RangeToEnumerable RangeToEnumerable(const std::pair range) | class _RangeToEnumerable {}\n", - "template<> struct is_flags : std::true_type {} | enum class StringComparison : int {}\n", - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", - "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", - "const char *FriendlyNameOf(const std::type_index &type) | namespace std {}\n", - "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", - "class InvalidCastException : public Exception {} | class Exception : public std::exception {}\n", - "class ArgumentException : public Exception {} | class Exception : public std::exception {}\n", - "class OperationFailureException : public Exception {} | class Exception : public std::exception {}\n", - "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", - "int main(int argc, char *argv[]) | class ConfigurationParser {}\n", - "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", - "int main(int argc, char *argv[]) | double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const\n", - "int main(int argc, char *argv[]) | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n", - "class ConfigurationParser {} | namespace std {}\n", - "RowReader &operator>>(RowReader &reader, std::string &rhs) | class RowReader {}\n", - "TStream 
&operator>>(TStream &s, RowReader &reader) | class RowReader {}\n", - "RowReader &operator>>(RowReader &reader, int &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, int &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, long &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", "RowReader &operator>>(RowReader &reader, long &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, float &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", "RowReader &operator>>(RowReader &reader, float &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, double &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", "RowReader &operator>>(RowReader &reader, double &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "RowReader &operator>>(RowReader &reader, bool &rhs) | class RowReader {}\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, int &rhs)\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | enum class StringComparison : int {}\n", "RowReader &operator>>(RowReader &reader, bool &rhs) | RowReader &operator>>(RowReader &reader, std::string &rhs)\n", - "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", - "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", - "void 
ConfigurationParser::Load(std::istream &inputStream) | class ConfigurationParser {}\n", "void ConfigurationParser::Load(std::istream &inputStream) | RowReader &operator>>(RowReader &reader, int &rhs)\n", - "void ConfigurationParser::Load(std::istream &inputStream) | class Exception : public std::exception {}\n", - "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const | class ConfigurationParser {}\n", - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class ConfigurationParser {}\n", "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const | class Exception : public std::exception {}\n", - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class ConfigurationParser {}\n", "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const | class Exception : public std::exception {}\n", - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class ConfigurationParser {}\n", "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | enum class StringComparison : int {}\n", - 
"bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const | class Exception : public std::exception {}\n", - "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | class ConfigurationParser {}\n", "ConfigurationParser::ConfigurationParser(std::istream &inputStream) | void ConfigurationParser::Load(std::istream &inputStream)\n", - "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | class ConfigurationParser {}\n", "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | TStream OpenAndValidate(const TPath arg1)\n", - "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | void ConfigurationParser::Load(std::istream &inputStream)\n" + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath) | void ConfigurationParser::Load(std::istream &inputStream)\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)\n", + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision) | enum class StringComparison : int {}\n", + "bool Confirm(const std::string &prompt) | bool Confirm(const std::string &prompt)\n", + "const char *FriendlyNameOf(const std::type_index &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const type_info &type) | const char *FriendlyNameOf()\n", + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance) | const char *FriendlyNameOf()\n", + "void ReportException(const std::exception &ex, int level) | void ReportException(const std::exception &ex, int level)\n", + "int main(int argc, char *argv[]) | TStream OpenAndValidate(const TPath arg1)\n", + "int main(int argc, char *argv[]) | bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const\n", + "int main(int argc, char *argv[]) | double ConfigurationParser::GetDouble(const 
std::string &key, double defaultValue) const\n", + "int main(int argc, char *argv[]) | std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const\n", + "int main(int argc, char *argv[]) | void ReportException(const std::exception &ex, int level)\n" ] } ], @@ -401,62 +327,48 @@ "name": "stdout", "output_type": "stream", "text": [ - "('bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)', 0.23606052783278517)\n", - "('bool Confirm(const std::string &prompt)', 0.1098426006632092)\n", - "('void ConfigurationParser::Load(std::istream &inputStream)', 0.07201434847105015)\n", - "('class Exception : public std::exception {}', 0.06966460237711009)\n", - "('class RowReader {}', 0.06380841209403623)\n", - "('enum class StringComparison : int {}', 0.050441798119527724)\n", - "('RowReader &operator>>(RowReader &reader, bool &rhs)', 0.04895870075815647)\n", - "('std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const', 0.03924446648355976)\n", - "('class ConfigurationParser {}', 0.03735805037229169)\n", - "('bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const', 0.03557813887174236)\n", - "('namespace std {}', 0.033826737287587075)\n", - "('class BlockExitHandler {}', 0.03199281435681512)\n", - "('RowReader &operator>>(RowReader &reader, int &rhs)', 0.02457173934894065)\n", - "('RowReader &operator>>(RowReader &reader, std::string &rhs)', 0.023712590188007344)\n", - "('int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const', 0.021328542904543415)\n", - "('ConfigurationParser::ConfigurationParser(std::istream &inputStream)', 0.009694792229337916)\n", - "('ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)', 0.009694792229337916)\n", - "('int main(int argc, char *argv[])', 0.00921005261787102)\n", - "('class ArgumentException : public Exception {}', 
0.007755833783470333)\n", - "('void ReportException(const std::exception &ex, int level)', 0.007421220252219749)\n", - "('template<> struct is_flags : std::true_type {}', 0.007271094172003437)\n", - "('class _RangeToEnumerable {}', 0.0067136448924616805)\n", - "('double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const', 0.006670901038451692)\n", - "('const char *FriendlyNameOf()', 0.004544443409840942)\n", - "('RowReader &operator>>(RowReader &reader, long &rhs)', 0.0033931772802682707)\n", - "('RowReader &operator>>(RowReader &reader, float &rhs)', 0.0033931772802682707)\n", - "('RowReader &operator>>(RowReader &reader, double &rhs)', 0.0033931772802682707)\n", - "('class InvalidCastException : public Exception {}', 0.002423698057334479)\n", - "('inline std::string to_string(const std::pair &value)', 0.0019389584458675833)\n", - "('const char *FriendlyNameOf(const std::type_index &type)', 0.0014542188344006874)\n", - "('const char *FriendlyNameOf(const type_info &type)', 0.0014542188344006874)\n", - "('const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)', 0.0014542188344006874)\n", - "('TEnum operator&(TEnum lhs, TEnum rhs)', 0.0014542188344006874)\n", - "('TEnum operator|(TEnum lhs, TEnum rhs)', 0.0014542188344006874)\n", - "('TStream OpenAndValidate(const TPath arg1)', 0.0006263408888268776)\n", - "('_RangeToEnumerable RangeToEnumerable(const std::pair range)', 0.0004847396114668958)\n", - "('class OperationFailureException : public Exception {}', 0.0004847396114668958)\n", - "('TStream &operator>>(TStream &s, RowReader &reader)', 0.0004847396114668958)\n", - "('_DECLARE_ENUM', 0.0004847396114668958)\n", - "('_DECLARE_ENUM_DEFAULT', 0.0004847396114668958)\n", - "('ANSI_COLOR_RED', 0.0004847396114668958)\n", - "('ANSI_COLOR_GREEN', 0.0004847396114668958)\n", - "('ANSI_COLOR_YELLOW', 0.0004847396114668958)\n", - "('ANSI_COLOR_BLUE', 0.0004847396114668958)\n", - "('ANSI_COLOR_MAGENTA', 0.0004847396114668958)\n", - 
"('ANSI_COLOR_CYAN', 0.0004847396114668958)\n", - "('ANSI_COLOR_BRIGHT', 0.0004847396114668958)\n", - "('ANSI_COLOR_RESET', 0.0004847396114668958)\n", - "('bool dynamic_kind_of(const TSrc *obj)', 0.0004847396114668958)\n", - "('bool pointer_kind_of(const std::shared_ptr obj)', 0.0004847396114668958)\n", - "('TDest safe_cast(TSrc obj)', 0.0004847396114668958)\n", - "('std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)', 0.0004847396114668958)\n", - "('std::string StreamStatusToString(const TStream &stream)', 0.0004847396114668958)\n", - "('void ValidateStream(const TStream &stream)', 0.0004847396114668958)\n", - "('_RE_TRACE', 0.0004847396114668958)\n", - "('_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING', 0.0004847396114668958)\n" + "('void ReportException(const int &ex, int level)', 0.49527330555396226)\n", + "('enum class StringComparison : int {}', 0.1145983519196816)\n", + "('bool Equal(const int &lhs, const int &rhs, int comparision)', 0.09938796985042238)\n", + "('int &operator>>(int &reader, bool &rhs)', 0.061196080770132576)\n", + "('int &operator>>(int &reader, int &rhs)', 0.04710340714736633)\n", + "('int main(int argc, char *argv[])', 0.04207230552946614)\n", + "('bool Confirm(const int &prompt)', 0.03251041790913293)\n", + "('int &operator>>(int &reader, long &rhs)', 0.011474265144399858)\n", + "('int &operator>>(int &reader, float &rhs)', 0.011474265144399858)\n", + "('int &operator>>(int &reader, double &rhs)', 0.011474265144399858)\n", + "('inline int to_string(const int &value)', 0.007649510096266572)\n", + "('const char *FriendlyNameOf()', 0.006788991932293897)\n", + "('TStream OpenAndValidate(const TPath arg1)', 0.0035379156601423947)\n", + "('ANSI_COLOR_RED', 0.001912377524066643)\n", + "('ANSI_COLOR_GREEN', 0.001912377524066643)\n", + "('ANSI_COLOR_YELLOW', 0.001912377524066643)\n", + "('ANSI_COLOR_BLUE', 0.001912377524066643)\n", + "('ANSI_COLOR_MAGENTA', 0.001912377524066643)\n", + "('ANSI_COLOR_CYAN', 0.001912377524066643)\n", + 
"('ANSI_COLOR_BRIGHT', 0.001912377524066643)\n", + "('ANSI_COLOR_RESET', 0.001912377524066643)\n", + "('bool dynamic_kind_of(const TSrc *obj)', 0.001912377524066643)\n", + "('bool pointer_kind_of(const int obj)', 0.001912377524066643)\n", + "('TDest safe_cast(TSrc obj)', 0.001912377524066643)\n", + "('int safe_pointer_cast(const int &obj)', 0.001912377524066643)\n", + "('int StreamStatusToString(const TStream &stream)', 0.001912377524066643)\n", + "('void ValidateStream(const TStream &stream)', 0.001912377524066643)\n", + "('_RangeToEnumerable RangeToEnumerable(const int range)', 0.001912377524066643)\n", + "('bool Equal(const int &lhs, const int &rhs, StringComparison comparision)', 0.001912377524066643)\n", + "('_RE_TRACE', 0.001912377524066643)\n", + "('const char *FriendlyNameOf(const std::type_index &type)', 0.001912377524066643)\n", + "('const char *FriendlyNameOf(const type_info &type)', 0.001912377524066643)\n", + "('const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)', 0.001912377524066643)\n", + "('TStream &operator>>(TStream &s, RowReader &reader)', 0.001912377524066643)\n", + "('RowReader &operator>>(RowReader &reader, int &rhs)', 0.001912377524066643)\n", + "('RowReader &operator>>(RowReader &reader, long &rhs)', 0.001912377524066643)\n", + "('RowReader &operator>>(RowReader &reader, float &rhs)', 0.001912377524066643)\n", + "('RowReader &operator>>(RowReader &reader, double &rhs)', 0.001912377524066643)\n", + "('RowReader &operator>>(RowReader &reader, bool &rhs)', 0.001912377524066643)\n", + "('_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING', 0.001912377524066643)\n", + "('_DECLARE_ENUM', 0.001912377524066643)\n", + "('_DECLARE_ENUM_DEFAULT', 0.001912377524066643)\n" ] } ], diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index c6c4d2727d1..9fa582ea14c 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -1,10 +1,12 @@ import asyncio from abc import ABC -from typing import 
Union +import collections.abc +from typing import Union, Set from git import Commit, Diff, DiffIndex, Repo -from persper.analytics.git_tools import EMPTY_TREE_SHA, diff_with_commit, get_contents +from persper.analytics.git_tools import (EMPTY_TREE_SHA, diff_with_commit, + get_contents) from persper.analytics.graph_server import CommitSeekingMode, GraphServer @@ -19,8 +21,25 @@ def __init__(self, repositoryRoot: str, graphServer: GraphServer, self._terminalCommit: Commit = self._repo.rev_parse(terminalCommit) self._firstParentOnly = firstParentOnly self._visitedCommits = set() + self._s_visitedCommits = _ReadOnlySet(self._visitedCommits) self._observer: AnalyzerObserver = emptyAnalyzerObserver + def __getstate__(self): + state = self.__dict__.copy() + state.pop("_repo", None) + state.pop("_s_visitedCommits", None) + state["_originCommit"] = self._originCommit.hexsha if self._originCommit else None + state["_terminalCommit"] = self._terminalCommit.hexsha if self._terminalCommit else None + state.pop("_observer", None) + return state + + def __setstate__(self, state): + self.__dict__.update(state) + self._repo = Repo(self._repositoryRoot) + self.originCommit = state["_originCommit"] + self.terminalCommit = state["_terminalCommit"] + self._s_visitedCommits = _ReadOnlySet(self._visitedCommits) + @property def observer(self): """ @@ -71,6 +90,13 @@ def firstParentOnly(self, value: bool): def graph(self): return self._graphServer.get_graph() + @property + def visitedCommits(self) -> Set[str]: + """ + Gets a set of visited commits, identified by their their SHA. 
+ """ + return self._s_visitedCommits + async def analyze(self): graphServerLastCommit = EMPTY_TREE_SHA commitSpec = self._terminalCommit @@ -80,7 +106,8 @@ async def analyze(self): topo_order=True, reverse=True, first_parent=self._firstParentOnly): def printCommitStatus(status: str): message = commit.message.strip()[:32] - print("Commit {0} ({1}): {2}".format(commit.hexsha, message, status)) + print("Commit {0} ({1}): {2}".format( + commit.hexsha, message, status)) if commit.hexsha in self._visitedCommits: printCommitStatus("Already visited.") continue @@ -100,7 +127,8 @@ def printCommitStatus(status: str): else: parent: Commit = commit.parents[0] if graphServerLastCommit != parent.hexsha: - printCommitStatus("Rewind to parent: {0}.".format(parent.hexsha)) + printCommitStatus( + "Rewind to parent: {0}.".format(parent.hexsha)) # jumping to the parent commit first await self._analyzeCommit(parent, graphServerLastCommit, CommitSeekingMode.Rewind) # then go on with current commit @@ -225,3 +253,17 @@ class _EmptyAnalyzerObserverType(AnalyzerObserver): """ An AnalyzerObserver instance that does nothing in their notification methods. 
""" + + +class _ReadOnlySet(collections.abc.Set): + def __init__(self, underlyingSet: collections.abc.Set): + self._underlyingSet = underlyingSet + + def __contains__(self, x): + return x in self._underlyingSet + + def __len__(self): + return len(self._underlyingSet) + + def __iter__(self): + return self._underlyingSet.__iter__() diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index c94f8f7d002..4650eb99403 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -37,7 +37,6 @@ class LspClientGraphServer(GraphServer): def __init__(self, workspaceRoot: str, languageServerCommand: Union[str, List[str]] = None, dumpLogs: bool = False, - dumpGraphs: bool = False, graph: CallCommitGraph = None): """ workspaceRoot: root of the temporary workspace path. LSP workspace and intermediate repository files @@ -65,7 +64,6 @@ def __init__(self, workspaceRoot: str, self._callGraphManager: CallGraphManager = None self._lastFileWrittenTime: datetime = None self._dumpLogs = dumpLogs - self._dumpGraphs = dumpGraphs # [(oldPath, newPath, addedLines, removedLines), ...] # added/removedLines := [[startLine, modifiedLines], ...] 
self._stashedPatches: List[Tuple[PurePath, PurePath, List[Tuple[int, int]], List[Tuple[int, int]]]] = [] @@ -168,8 +166,6 @@ async def end_commit(self, hexsha): # update vetices & edges if self._commitSeekingMode != CommitSeekingMode.Rewind: await self.updateGraph() - if self._dumpGraphs: - self._callGraph.dumpTo("Graph-" + hexsha + ".txt") # calculate added lines if self._commitSeekingMode == CommitSeekingMode.NormalForward: diff --git a/persper/analytics/lsp_graph_server/ccls.py b/persper/analytics/lsp_graph_server/ccls.py index 5974c4d7443..8e18382f9bb 100644 --- a/persper/analytics/lsp_graph_server/ccls.py +++ b/persper/analytics/lsp_graph_server/ccls.py @@ -160,10 +160,9 @@ class CclsGraphServer(LspClientGraphServer): def __init__(self, workspaceRoot: str, cacheRoot: str = None, languageServerCommand: Union[str, List[str]] = None, dumpLogs: bool = False, - dumpGraphs: bool = False, graph: CallCommitGraph = None): super().__init__(workspaceRoot, languageServerCommand=languageServerCommand, - dumpLogs=dumpLogs, dumpGraphs=dumpGraphs, graph=graph) + dumpLogs=dumpLogs, graph=graph) self._cacheRoot = Path(cacheRoot).resolve() if cacheRoot else self._workspaceRoot.joinpath(".ccls-cache") self._c_requireScopeDefinitionMatch = True diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index 4bd6f79dce8..0b903fb9e2e 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -51,6 +51,9 @@ def createCclsGraphServer(): @pytest.mark.asyncio async def testFeatureBranchFirstParent(): + """ + Tests test_feature_branch repos, only on topical branch. + """ repoPath = prepareRepo("test_feature_branch") graphServer = createCclsGraphServer() analyzer = Analyzer(repoPath, graphServer, firstParentOnly=True) @@ -63,6 +66,9 @@ async def testFeatureBranchFirstParent(): @pytest.mark.asyncio async def testFeatureBranch(): + """ + Tests test_feature_branch repos, on all branches. 
+ """ repoPath = prepareRepo("test_feature_branch") graphServer = createCclsGraphServer() analyzer = Analyzer(repoPath, graphServer, firstParentOnly=False) From 9f6ee5b493b5d383993bb66922c420b5b851575c Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 16 Feb 2019 23:34:31 +0800 Subject: [PATCH 36/84] Update lsp_graph_server\README.md. --- persper/analytics/lsp_graph_server/README.md | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/persper/analytics/lsp_graph_server/README.md b/persper/analytics/lsp_graph_server/README.md index ffb9b38992a..91e01216e45 100644 --- a/persper/analytics/lsp_graph_server/README.md +++ b/persper/analytics/lsp_graph_server/README.md @@ -1,8 +1,24 @@ # lsp_graph_server To try out the graph server backed by LSP, especially the [ccls](https://github.com/MaskRay/ccls)-based one, you need -* Compile [ccls-prime](https://github.com/Persper/ccls-prime), the customized ccls fork for graph server. -* Place the compiled binary under `bin` folder of the repository root. +* Compile [MaskRay/ccls](https://github.com/MaskRay/ccls), the customized ccls fork for graph server. + +* Place the compiled binary under `/bin` folder of the repository root, i.e. `/bin/ccls` or `/bin/ccls.exe`. + +## Work with notebook + * In the repository root, run `pipenv run ./tools/repo_creater/create_repo.py test/cpp_test_repo/` to create a cpp test repo. + * `jupyter notebook`, then open `notebooks/lsp-ccls.ipynb` + * Execute all the cells + +## Work with unit tests + +* Open a shell under `/test/test_analytics`, run + + ```powershell + pipenv run pytest test_analyzer_lsp_ccls.py + ``` + + * The test results are compared against baseline (by commit) in `/test/test_analytics/baseline`. \ No newline at end of file From 3ad7f80b52b8a9ed31ed7bdb55332a79b1a9e9f1 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 17 Feb 2019 23:57:43 +0800 Subject: [PATCH 37/84] Minor adjustment on logging message. 
--- persper/analytics/analyzer2.py | 11 ++++++++--- persper/analytics/lsp_graph_server/__init__.py | 1 + 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 9fa582ea14c..06a62096051 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -97,17 +97,21 @@ def visitedCommits(self) -> Set[str]: """ return self._s_visitedCommits - async def analyze(self): + async def analyze(self, maxAnalyzedCommits=1000): graphServerLastCommit = EMPTY_TREE_SHA commitSpec = self._terminalCommit if self._originCommit: commitSpec = self._originCommit.hexsha + ".." + self._terminalCommit.hexsha + analyzedCommits = 0 for commit in self._repo.iter_commits(commitSpec, topo_order=True, reverse=True, first_parent=self._firstParentOnly): def printCommitStatus(status: str): message = commit.message.strip()[:32] - print("Commit {0} ({1}): {2}".format( - commit.hexsha, message, status)) + # note the commit # here only indicates the ordinal of current commit in current analysis session + print("Commit #{0} {1} ({2}): {3}".format(analyzedCommits, commit.hexsha, message, status)) + + if maxAnalyzedCommits and analyzedCommits > maxAnalyzedCommits: + print("Max analyzed commits reached.") if commit.hexsha in self._visitedCommits: printCommitStatus("Already visited.") continue @@ -135,6 +139,7 @@ def printCommitStatus(status: str): printCommitStatus("Going forward.") await self._analyzeCommit(commit, parent, CommitSeekingMode.NormalForward) graphServerLastCommit = commit.hexsha + analyzedCommits += 1 async def _analyzeCommit(self, commit: Union[Commit, str], parentCommit: Union[Commit, str], seekingMode: CommitSeekingMode): diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 4650eb99403..61421763f5a 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -84,6 +84,7 @@ def 
__setstate__(self, state): def start_commit(self, hexsha: str, seeking_mode: CommitSeekingMode, author_name: str, author_email: str, commit_message: str): + _logger.info("Start commit: %s %s (%s)", hexsha, commit_message[:32].strip(), seeking_mode) self._commitSeekingMode = seeking_mode if seeking_mode != CommitSeekingMode.Rewind: self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) From 844e004736d4295fcb0801f2928d2ba6da2e9c98 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 18 Feb 2019 20:28:16 -0800 Subject: [PATCH 38/84] Specify version number for python-jsonrpc-server since newer version renames top-level package --- Pipfile | 2 +- Pipfile.lock | 410 +++++++++++++++++++++++++-------------------------- 2 files changed, 204 insertions(+), 208 deletions(-) diff --git a/Pipfile b/Pipfile index a232e639c21..b956277f56d 100644 --- a/Pipfile +++ b/Pipfile @@ -18,7 +18,7 @@ pydot = "*" ipykernel = "*" matplotlib = "*" antlr4-python3-runtime = "*" -python-jsonrpc-server = "*" +python-jsonrpc-server = "==0.0.2" pytest-asyncio = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index b672acae202..b77ca0f4828 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "d04772a318aa545b5bfd2b3281d2433c3b6fb3d9e626ee57ba5c45540f45db16" + "sha256": "891f5ac4e93119d7cf21aa2fbae2258c92280cd7e32e9b5630726d999c8a6b3e" }, "pipfile-spec": 6, "requires": { @@ -18,17 +18,25 @@ "default": { "antlr4-python3-runtime": { "hashes": [ - "sha256:1b26b72c4492cef310542da10bf6b2ab4aa1775618fc6003f75b55ae9eaa3fd3" + "sha256:168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b" ], "index": "pypi", - "version": "==4.7.1" + "version": "==4.7.2" + }, + "appnope": { + "hashes": [ + "sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0", + "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71" + ], + "markers": "sys_platform == 'darwin'", + "version": "==0.1.0" }, 
"atomicwrites": { "hashes": [ - "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", - "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" + "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", + "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" ], - "version": "==1.2.1" + "version": "==1.3.0" }, "attrs": { "hashes": [ @@ -46,10 +54,10 @@ }, "certifi": { "hashes": [ - "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", - "sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a" + "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", + "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033" ], - "version": "==2018.10.15" + "version": "==2018.11.29" }, "chardet": { "hashes": [ @@ -58,14 +66,6 @@ ], "version": "==3.0.4" }, - "colorama": { - "hashes": [ - "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", - "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48" - ], - "markers": "sys_platform == 'win32'", - "version": "==0.4.1" - }, "cycler": { "hashes": [ "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", @@ -75,10 +75,10 @@ }, "decorator": { "hashes": [ - "sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82", - "sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c" + "sha256:33cd704aea07b4c28b3eb2c97d288a06918275dac0ecebdaf1bc8a48d98adb9e", + "sha256:cabb249f4710888a2fc0e13e9a16c343d932033718ff62e1e9bc93a9d3a9122b" ], - "version": "==4.3.0" + "version": "==4.3.2" }, "et-xmlfile": { "hashes": [ @@ -109,10 +109,10 @@ }, "idna": { "hashes": [ - "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", - "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" + "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", + 
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" ], - "version": "==2.7" + "version": "==2.8" }, "ipykernel": { "hashes": [ @@ -124,10 +124,10 @@ }, "ipython": { "hashes": [ - "sha256:a5781d6934a3341a1f9acb4ea5acdc7ea0a0855e689dbe755d070ca51e995435", - "sha256:b10a7ddd03657c761fc503495bc36471c8158e3fc948573fb9fe82a7029d8efd" + "sha256:06de667a9e406924f97781bda22d5d76bfb39762b678762d86a466e63f65dc39", + "sha256:5d3e020a6b5f29df037555e5c45ab1088d6a7cf3bd84f47e0ba501eeb0c3ec82" ], - "version": "==7.1.1" + "version": "==7.3.0" }, "ipython-genutils": { "hashes": [ @@ -145,26 +145,17 @@ }, "jedi": { "hashes": [ - "sha256:0191c447165f798e6a730285f2eee783fff81b0d3df261945ecb80983b5c3ca7", - "sha256:b7493f73a2febe0dc33d51c99b474547f7f6c0b2c8fb2b21f453eef204c12148" - ], - "version": "==0.13.1" - }, - "jsonpickle": { - "hashes": [ - "sha256:8b6212f1155f43ce67fa945efae6d010ed059f3ca5ed377aa070e5903d45b722", - "sha256:d43ede55b3d9b5524a8e11566ea0b11c9c8109116ef6a509a1b619d2041e7397", - "sha256:ed4adf0d14564c56023862eabfac211cf01211a20c5271896c8ab6f80c68086c" + "sha256:571702b5bd167911fe9036e5039ba67f820d6502832285cde8c881ab2b2149fd", + "sha256:c8481b5e59d34a5c7c42e98f6625e633f6ef59353abea6437472c7ec2093f191" ], - "index": "pypi", - "version": "==1.0" + "version": "==0.13.2" }, "jupyter-client": { "hashes": [ - "sha256:27befcf0446b01e29853014d6a902dd101ad7d7f94e2252b1adca17c3466b761", - "sha256:59e6d791e22a8002ad0e80b78c6fd6deecab4f9e1b1aa1a22f4213de271b29ea" + "sha256:b5f9cb06105c1d2d30719db5ffb3ea67da60919fb68deaefa583deccd8813551", + "sha256:c44411eb1463ed77548bc2d5ec0d744c9b81c4a542d9637c7a52824e2121b987" ], - "version": "==5.2.3" + "version": "==5.2.4" }, "jupyter-core": { "hashes": [ @@ -208,39 +199,35 @@ }, "lxml": { "hashes": [ - "sha256:02bc220d61f46e9b9d5a53c361ef95e9f5e1d27171cd461dddb17677ae2289a5", - "sha256:22f253b542a342755f6cfc047fe4d3a296515cf9b542bc6e261af45a80b8caf6", - 
"sha256:2f31145c7ff665b330919bfa44aacd3a0211a76ca7e7b441039d2a0b0451e415", - "sha256:36720698c29e7a9626a0dc802ef8885f8f0239bfd1689628ecd459a061f2807f", - "sha256:438a1b0203545521f6616132bfe0f4bca86f8a401364008b30e2b26ec408ce85", - "sha256:4815892904c336bbaf73dafd54f45f69f4021c22b5bad7332176bbf4fb830568", - "sha256:5be031b0f15ad63910d8e5038b489d95a79929513b3634ad4babf77100602588", - "sha256:5c93ae37c3c588e829b037fdfbd64a6e40c901d3f93f7beed6d724c44829a3ad", - "sha256:60842230678674cdac4a1cf0f707ef12d75b9a4fc4a565add4f710b5fcf185d5", - "sha256:62939a8bb6758d1bf923aa1c13f0bcfa9bf5b2fc0f5fa917a6e25db5fe0cfa4e", - "sha256:75830c06a62fe7b8fe3bbb5f269f0b308f19f3949ac81cfd40062f47c1455faf", - "sha256:81992565b74332c7c1aff6a913a3e906771aa81c9d0c68c68113cffcae45bc53", - "sha256:8c892fb0ee52c594d9a7751c7d7356056a9682674b92cc1c4dc968ff0f30c52f", - "sha256:9d862e3cf4fc1f2837dedce9c42269c8c76d027e49820a548ac89fdcee1e361f", - "sha256:a623965c086a6e91bb703d4da62dabe59fe88888e82c4117d544e11fd74835d6", - "sha256:a7783ab7f6a508b0510490cef9f857b763d796ba7476d9703f89722928d1e113", - "sha256:aab09fbe8abfa3b9ce62aaf45aca2d28726b1b9ee44871dbe644050a2fff4940", - "sha256:abf181934ac3ef193832fb973fd7f6149b5c531903c2ec0f1220941d73eee601", - "sha256:ae07fa0c115733fce1e9da96a3ac3fa24801742ca17e917e0c79d63a01eeb843", - "sha256:b9c78242219f674ab645ec571c9a95d70f381319a23911941cd2358a8e0521cf", - "sha256:bccb267678b870d9782c3b44d0cefe3ba0e329f9af8c946d32bf3778e7a4f271", - "sha256:c4df4d27f4c93b2cef74579f00b1d3a31a929c7d8023f870c4b476f03a274db4", - "sha256:caf0e50b546bb60dfa99bb18dfa6748458a83131ecdceaf5c071d74907e7e78a", - "sha256:d3266bd3ac59ac4edcd5fa75165dee80b94a3e5c91049df5f7c057ccf097551c", - "sha256:db0d213987bcd4e6d41710fb4532b22315b0d8fb439ff901782234456556aed1", - "sha256:dbbd5cf7690a40a9f0a9325ab480d0fccf46d16b378eefc08e195d84299bfae1", - "sha256:e16e07a0ec3a75b5ee61f2b1003c35696738f937dc8148fbda9fe2147ccb6e61", - "sha256:e175a006725c7faadbe69e791877d09936c0ef2cf49d01b60a6c1efcb0e8be6f", 
- "sha256:edd9c13a97f6550f9da2236126bb51c092b3b1ce6187f2bd966533ad794bbb5e", - "sha256:fa39ea60d527fbdd94215b5e5552f1c6a912624521093f1384a491a8ad89ad8b" + "sha256:0537eee4902e8bf4f41bfee8133f7edf96533dd175930a12086d6a40d62376b2", + "sha256:0562ec748abd230ab87d73384e08fa784f9b9cee89e28696087d2d22c052cc27", + "sha256:09e91831e749fbf0f24608694e4573be0ef51430229450c39c83176cc2e2d353", + "sha256:1ae4c0722fc70c0d4fba43ae33c2885f705e96dce1db41f75ae14a2d2749b428", + "sha256:1c630c083d782cbaf1f7f37f6cac87bda9cff643cf2803a5f180f30d97955cef", + "sha256:2fe74e3836bd8c0fa7467ffae05545233c7f37de1eb765cacfda15ad20c6574a", + "sha256:37af783c2667ead34a811037bda56a0b142ac8438f7ed29ae93f82ddb812fbd6", + "sha256:3f2d9eafbb0b24a33f56acd16f39fc935756524dcb3172892721c54713964c70", + "sha256:47d8365a8ef14097aa4c65730689be51851b4ade677285a3b2daa03b37893e26", + "sha256:510e904079bc56ea784677348e151e1156040dbfb736f1d8ea4b9e6d0ab2d9f4", + "sha256:58d0851da422bba31c7f652a7e9335313cf94a641aa6d73b8f3c67602f75b593", + "sha256:7940d5c2185ffb989203dacbb28e6ae88b4f1bb25d04e17f94b0edd82232bcbd", + "sha256:7cf39bb3a905579836f7a8f3a45320d9eb22f16ab0c1e112efb940ced4d057a5", + "sha256:9563a23c1456c0ab550c087833bc13fcc61013a66c6420921d5b70550ea312bf", + "sha256:95b392952935947e0786a90b75cc33388549dcb19af716b525dae65b186138fc", + "sha256:983129f3fd3cef5c3cf067adcca56e30a169656c00fcc6c648629dbb850b27fa", + "sha256:a0b75b1f1854771844c647c464533def3e0a899dd094a85d1d4ed72ecaaee93d", + "sha256:b5db89cc0ef624f3a81214b7961a99f443b8c91e88188376b6b322fd10d5b118", + "sha256:c0a7751ba1a4bfbe7831920d98cee3ce748007eab8dfda74593d44079568219a", + "sha256:c0c5a7d4aafcc30c9b6d8613a362567e32e5f5b708dc41bc3a81dac56f8af8bb", + "sha256:d4d63d85eacc6cb37b459b16061e1f100d154bee89dc8d8f9a6128a5a538e92e", + "sha256:da5e7e941d6e71c9c9a717c93725cda0708c2474f532e3680ac5e39ec57d224d", + "sha256:dccad2b3c583f036f43f80ac99ee212c2fa9a45151358d55f13004d095e683b2", + 
"sha256:df46307d39f2aeaafa1d25309b8a8d11738b73e9861f72d4d0a092528f498baa", + "sha256:e70b5e1cb48828ddd2818f99b1662cb9226dc6f57d07fc75485405c77da17436", + "sha256:ea825562b8cd057cbc9810d496b8b5dec37a1e2fc7b27bc7c1e72ce94462a09a" ], "index": "pypi", - "version": "==4.2.5" + "version": "==4.3.1" }, "matplotlib": { "hashes": [ @@ -263,11 +250,11 @@ }, "more-itertools": { "hashes": [ - "sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092", - "sha256:c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e", - "sha256:fcbfeaea0be121980e15bc97b3817b5202ca73d0eae185b4550cbfce2a3ebb3d" + "sha256:0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40", + "sha256:590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1" ], - "version": "==4.3.0" + "markers": "python_version > '2.7'", + "version": "==6.0.0" }, "networkx": { "hashes": [ @@ -285,51 +272,54 @@ }, "numpy": { "hashes": [ - "sha256:0df89ca13c25eaa1621a3f09af4c8ba20da849692dcae184cb55e80952c453fb", - "sha256:154c35f195fd3e1fad2569930ca51907057ae35e03938f89a8aedae91dd1b7c7", - "sha256:18e84323cdb8de3325e741a7a8dd4a82db74fde363dce32b625324c7b32aa6d7", - "sha256:1e8956c37fc138d65ded2d96ab3949bd49038cc6e8a4494b1515b0ba88c91565", - "sha256:23557bdbca3ccbde3abaa12a6e82299bc92d2b9139011f8c16ca1bb8c75d1e95", - "sha256:24fd645a5e5d224aa6e39d93e4a722fafa9160154f296fd5ef9580191c755053", - "sha256:36e36b6868e4440760d4b9b44587ea1dc1f06532858d10abba98e851e154ca70", - "sha256:3d734559db35aa3697dadcea492a423118c5c55d176da2f3be9c98d4803fc2a7", - "sha256:416a2070acf3a2b5d586f9a6507bb97e33574df5bd7508ea970bbf4fc563fa52", - "sha256:4a22dc3f5221a644dfe4a63bf990052cc674ef12a157b1056969079985c92816", - "sha256:4d8d3e5aa6087490912c14a3c10fbdd380b40b421c13920ff468163bc50e016f", - "sha256:4f41fd159fba1245e1958a99d349df49c616b133636e0cf668f169bce2aeac2d", - "sha256:561ef098c50f91fbac2cc9305b68c915e9eb915a74d9038ecf8af274d748f76f", - 
"sha256:56994e14b386b5c0a9b875a76d22d707b315fa037affc7819cda08b6d0489756", - "sha256:73a1f2a529604c50c262179fcca59c87a05ff4614fe8a15c186934d84d09d9a5", - "sha256:7da99445fd890206bfcc7419f79871ba8e73d9d9e6b82fe09980bc5bb4efc35f", - "sha256:99d59e0bcadac4aa3280616591fb7bcd560e2218f5e31d5223a2e12a1425d495", - "sha256:a4cc09489843c70b22e8373ca3dfa52b3fab778b57cf81462f1203b0852e95e3", - "sha256:a61dc29cfca9831a03442a21d4b5fd77e3067beca4b5f81f1a89a04a71cf93fa", - "sha256:b1853df739b32fa913cc59ad9137caa9cc3d97ff871e2bbd89c2a2a1d4a69451", - "sha256:b1f44c335532c0581b77491b7715a871d0dd72e97487ac0f57337ccf3ab3469b", - "sha256:b261e0cb0d6faa8fd6863af26d30351fd2ffdb15b82e51e81e96b9e9e2e7ba16", - "sha256:c857ae5dba375ea26a6228f98c195fec0898a0fd91bcf0e8a0cae6d9faf3eca7", - "sha256:cf5bb4a7d53a71bb6a0144d31df784a973b36d8687d615ef6a7e9b1809917a9b", - "sha256:db9814ff0457b46f2e1d494c1efa4111ca089e08c8b983635ebffb9c1573361f", - "sha256:df04f4bad8a359daa2ff74f8108ea051670cafbca533bb2636c58b16e962989e", - "sha256:ecf81720934a0e18526177e645cbd6a8a21bb0ddc887ff9738de07a1df5c6b61", - "sha256:edfa6fba9157e0e3be0f40168eb142511012683ac3dc82420bee4a3f3981b30e" + "sha256:0cdbbaa30ae69281b18dd995d3079c4e552ad6d5426977f66b9a2a95f11f552a", + "sha256:2b0cca1049bd39d1879fa4d598624cafe82d35529c72de1b3d528d68031cdd95", + "sha256:31d3fe5b673e99d33d70cfee2ea8fe8dccd60f265c3ed990873a88647e3dd288", + "sha256:34dd4922aab246c39bf5df03ca653d6265e65971deca6784c956bf356bca6197", + "sha256:384e2dfa03da7c8d54f8f934f61b6a5e4e1ebb56a65b287567629d6c14578003", + "sha256:392e2ea22b41a22c0289a88053204b616181288162ba78e6823e1760309d5277", + "sha256:4341a39fc085f31a583be505eabf00e17c619b469fef78dc7e8241385bfddaa4", + "sha256:45080f065dcaa573ebecbfe13cdd86e8c0a68c4e999aa06bd365374ea7137706", + "sha256:485cb1eb4c9962f4cd042fed9424482ec1d83fee5dc2ef3f2552ac47852cb259", + "sha256:575cefd28d3e0da85b0864506ae26b06483ee4a906e308be5a7ad11083f9d757", + "sha256:62784b35df7de7ca4d0d81c5b6af5983f48c5cdef32fc3635b445674e56e3266", 
+ "sha256:69c152f7c11bf3b4fc11bc4cc62eb0334371c0db6844ebace43b7c815b602805", + "sha256:6ccfdcefd287f252cf1ea7a3f1656070da330c4a5658e43ad223269165cdf977", + "sha256:7298fbd73c0b3eff1d53dc9b9bdb7add8797bb55eeee38c8ccd7906755ba28af", + "sha256:79463d918d1bf3aeb9186e3df17ddb0baca443f41371df422f99ee94f4f2bbfe", + "sha256:8bbee788d82c0ac656536de70e817af09b7694f5326b0ef08e5c1014fcb96bb3", + "sha256:a863957192855c4c57f60a75a1ac06ce5362ad18506d362dd807e194b4baf3ce", + "sha256:ae602ba425fb2b074e16d125cdce4f0194903da935b2e7fe284ebecca6d92e76", + "sha256:b13faa258b20fa66d29011f99fdf498641ca74a0a6d9266bc27d83c70fea4a6a", + "sha256:c2c39d69266621dd7464e2bb740d6eb5abc64ddc339cc97aa669f3bb4d75c103", + "sha256:e9c88f173d31909d881a60f08a8494e63f1aff2a4052476b24d4f50e82c47e24", + "sha256:f1a29267ac29fff0913de0f11f3a9edfcd3f39595f467026c29376fad243ebe3", + "sha256:f69dde0c5a137d887676a8129373e44366055cf19d1b434e853310c7a1e68f93" ], "index": "pypi", - "version": "==1.15.4" + "version": "==1.16.1" }, "openpyxl": { "hashes": [ - "sha256:8b0c2a44f394a7a913a2e7cdcc1dc601d5f45c59b85a356e591e2ac5463e21e7" + "sha256:70da6b45a5925285b6a3d93570b45f4402eb2d335740163a58eef533b139565c" ], "index": "pypi", - "version": "==2.5.11" + "version": "==2.6.0" }, "parso": { "hashes": [ - "sha256:35704a43a3c113cce4de228ddb39aab374b8004f4f2407d070b6a2ca784ce8a2", - "sha256:895c63e93b94ac1e1690f5fdd40b65f07c8171e3e53cbd7793b5b96c0e0a7f24" + "sha256:4580328ae3f548b358f4901e38c0578229186835f0fa0846e47369796dd5bcc9", + "sha256:68406ebd7eafe17f8e40e15a84b56848eccbf27d7c1feb89e93d8fca395706db" + ], + "version": "==0.3.4" + }, + "pexpect": { + "hashes": [ + "sha256:2a8e88259839571d1251d278476f3eec5db26deb73a70be5ed5dc5435e418aba", + "sha256:3fbd41d4caf27fa4a377bfd16fef87271099463e6fa73e92a52f92dfee5d425b" ], - "version": "==0.3.1" + "markers": "sys_platform != 'win32'", + "version": "==4.6.0" }, "pickleshare": { "hashes": [ @@ -340,18 +330,25 @@ }, "pluggy": { "hashes": [ - 
"sha256:447ba94990e8014ee25ec853339faf7b0fc8050cdc3289d4d71f7f410fb90095", - "sha256:bde19360a8ec4dfd8a20dcb811780a30998101f078fc7ded6162f0076f50508f" + "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", + "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" ], - "version": "==0.8.0" + "version": "==0.8.1" }, "prompt-toolkit": { "hashes": [ - "sha256:c1d6aff5252ab2ef391c2fe498ed8c088066f66bc64a8d5c095bbf795d9fec34", - "sha256:d4c47f79b635a0e70b84fdb97ebd9a274203706b1ee5ed44c10da62755cf3ec9", - "sha256:fd17048d8335c1e6d5ee403c3569953ba3eb8555d710bfc548faf0712666ea39" + "sha256:88002cc618cacfda8760c4539e76c3b3f148ecdb7035a3d422c7ecdc90c2a3ba", + "sha256:c6655a12e9b08edb8cf5aeab4815fd1e1bdea4ad73d3bbf269cf2e0c4eb75d5e", + "sha256:df5835fb8f417aa55e5cafadbaeb0cf630a1e824aad16989f9f0493e679ec010" + ], + "version": "==2.0.8" + }, + "ptyprocess": { + "hashes": [ + "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0", + "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f" ], - "version": "==2.0.7" + "version": "==0.6.0" }, "py": { "hashes": [ @@ -362,40 +359,48 @@ }, "pydot": { "hashes": [ - "sha256:33babe795de6ea2a26c18455e6d8a78b1dbbea6c41363a6136d9f95e9ce78425", - "sha256:dbb8c123ea6ea6cf09eca2403e597244a960b31e70c2a5c8a2a8a2e2594ce191" + "sha256:67be714300c78fda5fd52f79ec994039e3f76f074948c67b5ff539b433ad354f", + "sha256:d49c9d4dd1913beec2a997f831543c8cbd53e535b1a739e921642fe416235f01" ], "index": "pypi", - "version": "==1.3.0" + "version": "==1.4.1" }, "pygments": { "hashes": [ - "sha256:6301ecb0997a52d2d31385e62d0a4a4cf18d2f2da7054a5ddad5c366cd39cee7", - "sha256:82666aac15622bd7bb685a4ee7f6625dd716da3ef7473620c192c0168aae64fc" + "sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a", + "sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d" ], - "version": "==2.3.0" + "version": "==2.3.1" }, "pyparsing": { "hashes": [ - 
"sha256:40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b", - "sha256:f353aab21fd474459d97b709e527b5571314ee5f067441dc9f88e33eecd96592" + "sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a", + "sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3" ], - "version": "==2.3.0" + "version": "==2.3.1" }, "pytest": { "hashes": [ - "sha256:1d131cc532be0023ef8ae265e2a779938d0619bb6c2510f52987ffcba7fa1ee4", - "sha256:ca4761407f1acc85ffd1609f464ca20bb71a767803505bd4127d0e45c5a50e23" + "sha256:067a1d4bf827ffdd56ad21bd46674703fce77c5957f6c1eef731f6146bfcef1c", + "sha256:9687049d53695ad45cf5fdc7bbd51f0c49f1ea3ecfc4b7f3fde7501b541f17f4" ], "index": "pypi", - "version": "==4.0.1" + "version": "==4.3.0" + }, + "pytest-asyncio": { + "hashes": [ + "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf", + "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b" + ], + "index": "pypi", + "version": "==0.10.0" }, "python-dateutil": { "hashes": [ - "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", - "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02" + "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", + "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" ], - "version": "==2.7.5" + "version": "==2.8.0" }, "python-jsonrpc-server": { "hashes": [ @@ -436,78 +441,78 @@ }, "requests": { "hashes": [ - "sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54", - "sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263" + "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", + "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" ], "index": "pypi", - "version": "==2.20.1" + "version": "==2.21.0" }, "scikit-learn": { "hashes": [ - "sha256:1ccbc5d9bdd933f3f75a3e8f0a60eac764388be84cee9e66bfd51d16992996f0", - 
"sha256:3281dd405f15f8647e617062003b229b29dd74e966bfd1c033be720897402c54", - "sha256:40c8f8f2e72dabb183820c7d77caa7b826d87d919056a02507c941f0e583c83e", - "sha256:48cef3af610f83460a4419de6c77757fc3b687da4ad39aedcbe3a92c6714b0ae", - "sha256:5d801633c69e67f215c1b796a7040ba33e2c6b3bab3d335c2caeca8a24679081", - "sha256:635978ca918da035a60e20d390273803417a1fc691290e93d9d5852a016a2bc3", - "sha256:68df1b389647f300f6f93535b0eb94efacbf5e670982c1936885e7981409dd3f", - "sha256:6aa8b434de69c2507ee2bb874642f7bbfedb0a96c234a9129b8957e109d3bd47", - "sha256:7c4569c7ca504db5cd1f4d8dfca6500bbf6f1d0e25618831f25d9f9ab8b4e33d", - "sha256:82743bf812380ed767a9c3c263a8f0d76e7af93abd2404ee199bf46e57e56f4f", - "sha256:8d15d61d2b6324d99f135cb8ef95d1475367a4ebb3b9a1fc0f5b53a16c21974a", - "sha256:95f892fa68105f6bcce0f206262d3810924f3865abd93927eb41949adb50e538", - "sha256:9ebea27859a26c97ef33b6d07dc36190890a1ca4988eac317544a61001b90e92", - "sha256:9ee7e147fcb7355ea646916f13eacb71f89c90ccbf9b067078f14c4675cc0003", - "sha256:aae03d8e640e2cd58471add3759f2002c1d726548f53a70840a7b6f330210bc2", - "sha256:b8891cd8489aa622f2789a4a48b8e99e068bfef00653ae907a8fc89ee6c54682", - "sha256:c1fd93ba6b684c3624a7f9b8e5afc6c78bc381fd2607504860c0367a7055994d", - "sha256:c2a1ce4e405ab61857c4dba89aa997522a9098e4cbc383fa95805ae6be8c772e", - "sha256:c8482ff66457a1b148973240c0eaa0456b62b8b8e5e26615f7f00ba08cc5c132", - "sha256:cb439dc1d8ee60f441594db1a12543c4e91260cd548b7ad9bc58bff3916f8dde", - "sha256:cc723aa0de67f0398821e3961ce0dcad32f276564c6fff2fc6061cc885a89c9e", - "sha256:cfb3ffdc7098b7328b120bc5819d81045c59e0392e7221441b0ac50ed7853a73", - "sha256:e045a5445e772cafe4fee5b3954947af7984a3057dcac0407ec55e8bab91bbee", - "sha256:e45a49f3bd713540b5607b77e8fc7057a289a795c5fe182333b286a4d3e0338b", - "sha256:e8010c4fe8f1d3625c27a89625b35906869651094b5ef5a0b22cd8514501d7d8", - "sha256:f231c2d0e397309ef18b62a8adfe914de38e335c6f8fc06a3655d5ae8b524d69", - "sha256:fa1869c18fef812e321b9ed875519daefb3a7ea016ba1392526d231a7994e81c", 
- "sha256:fc4d3cff012c1aa2459e5745cdd3df31a30e2ff27dcbfadf1999e16dd8497a6e" - ], - "version": "==0.20.1" + "sha256:05d061606657af85365b5f71484e3362d924429edde17a90068960843ad597f5", + "sha256:071317afbb5c67fa493635376ddd724b414290255cbf6947c1155846956e93f7", + "sha256:0d03aaf19a25e59edac3099cda6879ba05129f0fa1e152e23b728ccd36104f57", + "sha256:1665ea0d4b75ef24f5f2a9d1527b7296eeabcbe3a1329791c954541e2ebde5a2", + "sha256:24eccb0ff31f84e88e00936c09197735ef1dcabd370aacb10e55dbc8ee464a78", + "sha256:27b48cabacce677a205e6bcda1f32bdc968fbf40cd2aa0a4f52852f6997fce51", + "sha256:2c51826b9daa87d7d356bebd39f8665f7c32e90e3b21cbe853d6c7f0d6b0d23b", + "sha256:3116299d392bd1d054655fa2a740e7854de87f1d573fa85503e64494e52ac795", + "sha256:3771861abe1fd1b2bbeaec7ba8cfca58fdedd75d790f099960e5332af9d1ff7a", + "sha256:473ba7d9a5eaec47909ee83d74b4a3be47a44505c5189d2cab67c0418cd030f1", + "sha256:621e2c91f9afde06e9295d128cb15cb6fc77dc00719393e9ec9d47119895b0d4", + "sha256:645865462c383e5faad473b93145a8aee97d839c9ad1fd7a17ae54ec8256d42b", + "sha256:80e2276d4869d302e84b7c03b5bac4a67f6cd331162e62ae775a3e5855441a60", + "sha256:84d2cfe0dee3c22b26364266d69850e0eb406d99714045929875032f91d3c918", + "sha256:87ea9ace7fe811638dfc39b850b60887509b8bfc93c4006d5552fa066d04ddc7", + "sha256:a4d1e535c75881f668010e6e53dfeb89dd50db85b05c5c45af1991c8b832d757", + "sha256:a4f14c4327d2e44567bfb3a0bee8c55470f820bc9a67af3faf200abd8ed79bf2", + "sha256:a7b3c24e193e8c6eaeac075b5d0bb0a7fea478aa2e4b991f6a7b030fc4fd410d", + "sha256:ab2919aca84f1ac6ef60a482148eec0944364ab1832e63f28679b16f9ef279c8", + "sha256:b0f79d5ff74f3c68a4198ad5b4dfa891326b5ce272dd064d11d572b25aae5b43", + "sha256:bc5bc7c7ee2572a1edcb51698a6caf11fae554194aaab9a38105d9ec419f29e6", + "sha256:bc5c750d548795def79576533f8f0f065915f17f48d6e443afce2a111f713747", + "sha256:c68969c30b3b2c1fe07c1376110928eade61da4fc29c24c9f1a89435a7d08abe", + "sha256:d3b4f791d2645fe936579d61f1ff9b5dcf0c8f50db7f0245ca8f16407d7a5a46", + 
"sha256:dac0cd9fdd8ac6dd6108a10558e2e0ca1b411b8ea0a3165641f9ab0b4322df4e", + "sha256:eb7ddbdf33eb822fdc916819b0ab7009d954eb43c3a78e7dd2ec5455e074922a", + "sha256:ed537844348402ed53420187b3a6948c576986d0b2811a987a49613b6a26f29e", + "sha256:fcca54733e692fe03b8584f7d4b9344f4b6e3a74f5b326c6e5f5e9d2504bdce7" + ], + "version": "==0.20.2" }, "scipy": { "hashes": [ - "sha256:0611ee97296265af4a21164a5323f8c1b4e8e15c582d3dfa7610825900136bb7", - "sha256:08237eda23fd8e4e54838258b124f1cd141379a5f281b0a234ca99b38918c07a", - "sha256:0e645dbfc03f279e1946cf07c9c754c2a1859cb4a41c5f70b25f6b3a586b6dbd", - "sha256:0e9bb7efe5f051ea7212555b290e784b82f21ffd0f655405ac4f87e288b730b3", - "sha256:108c16640849e5827e7d51023efb3bd79244098c3f21e4897a1007720cb7ce37", - "sha256:340ef70f5b0f4e2b4b43c8c8061165911bc6b2ad16f8de85d9774545e2c47463", - "sha256:3ad73dfc6f82e494195144bd3a129c7241e761179b7cb5c07b9a0ede99c686f3", - "sha256:3b243c77a822cd034dad53058d7c2abf80062aa6f4a32e9799c95d6391558631", - "sha256:404a00314e85eca9d46b80929571b938e97a143b4f2ddc2b2b3c91a4c4ead9c5", - "sha256:423b3ff76957d29d1cce1bc0d62ebaf9a3fdfaf62344e3fdec14619bb7b5ad3a", - "sha256:42d9149a2fff7affdd352d157fa5717033767857c11bd55aa4a519a44343dfef", - "sha256:625f25a6b7d795e8830cb70439453c9f163e6870e710ec99eba5722775b318f3", - "sha256:698c6409da58686f2df3d6f815491fd5b4c2de6817a45379517c92366eea208f", - "sha256:729f8f8363d32cebcb946de278324ab43d28096f36593be6281ca1ee86ce6559", - "sha256:8190770146a4c8ed5d330d5b5ad1c76251c63349d25c96b3094875b930c44692", - "sha256:878352408424dffaa695ffedf2f9f92844e116686923ed9aa8626fc30d32cfd1", - "sha256:8b984f0821577d889f3c7ca8445564175fb4ac7c7f9659b7c60bef95b2b70e76", - "sha256:8f841bbc21d3dad2111a94c490fb0a591b8612ffea86b8e5571746ae76a3deac", - "sha256:c22b27371b3866c92796e5d7907e914f0e58a36d3222c5d436ddd3f0e354227a", - "sha256:d0cdd5658b49a722783b8b4f61a6f1f9c75042d0e29a30ccb6cacc9b25f6d9e2", - "sha256:d40dc7f494b06dcee0d303e51a00451b2da6119acbeaccf8369f2d29e28917ac", - 
"sha256:d8491d4784aceb1f100ddb8e31239c54e4afab8d607928a9f7ef2469ec35ae01", - "sha256:dfc5080c38dde3f43d8fbb9c0539a7839683475226cf83e4b24363b227dfe552", - "sha256:e24e22c8d98d3c704bb3410bce9b69e122a8de487ad3dbfe9985d154e5c03a40", - "sha256:e7a01e53163818d56eabddcafdc2090e9daba178aad05516b20c6591c4811020", - "sha256:ee677635393414930541a096fc8e61634304bb0153e4e02b75685b11eba14cae", - "sha256:f0521af1b722265d824d6ad055acfe9bd3341765735c44b5a4d0069e189a0f40", - "sha256:f25c281f12c0da726c6ed00535ca5d1622ec755c30a3f8eafef26cf43fede694" + "sha256:014cb900c003b5ac81a53f2403294e8ecf37aedc315b59a6b9370dce0aa7627a", + "sha256:281a34da34a5e0de42d26aed692ab710141cad9d5d218b20643a9cb538ace976", + "sha256:588f9cc4bfab04c45fbd19c1354b5ade377a8124d6151d511c83730a9b6b2338", + "sha256:5a10661accd36b6e2e8855addcf3d675d6222006a15795420a39c040362def66", + "sha256:628f60be272512ca1123524969649a8cb5ae8b31cca349f7c6f8903daf9034d7", + "sha256:6dcc43a88e25b815c2dea1c6fac7339779fc988f5df8396e1de01610604a7c38", + "sha256:70e37cec0ac0fe95c85b74ca4e0620169590fd5d3f44765f3c3a532cedb0e5fd", + "sha256:7274735fb6fb5d67d3789ddec2cd53ed6362539b41aa6cc0d33a06c003aaa390", + "sha256:78e12972e144da47326958ac40c2bd1c1cca908edc8b01c26a36f9ffd3dce466", + "sha256:790cbd3c8d09f3a6d9c47c4558841e25bac34eb7a0864a9def8f26be0b8706af", + "sha256:79792c8fe8e9d06ebc50fe23266522c8c89f20aa94ac8e80472917ecdce1e5ba", + "sha256:865afedf35aaef6df6344bee0de391ee5e99d6e802950a237f9fb9b13e441f91", + "sha256:870fd401ec7b64a895cff8e206ee16569158db00254b2f7157b4c9a5db72c722", + "sha256:963815c226b29b0176d5e3d37fc9de46e2778ce4636a5a7af11a48122ef2577c", + "sha256:9726791484f08e394af0b59eb80489ad94d0a53bbb58ab1837dcad4d58489863", + "sha256:9de84a71bb7979aa8c089c4fb0ea0e2ed3917df3fb2a287a41aaea54bbad7f5d", + "sha256:b2c324ddc5d6dbd3f13680ad16a29425841876a84a1de23a984236d1afff4fa6", + "sha256:b86ae13c597fca087cb8c193870507c8916cefb21e52e1897da320b5a35075e5", + "sha256:ba0488d4dbba2af5bf9596b849873102d612e49a118c512d9d302ceafa36e01a", 
+ "sha256:d78702af4102a3a4e23bb7372cec283e78f32f5573d92091aa6aaba870370fe1", + "sha256:def0e5d681dd3eb562b059d355ae8bebe27f5cc455ab7c2b6655586b63d3a8ea", + "sha256:e085d1babcb419bbe58e2e805ac61924dac4ca45a07c9fa081144739e500aa3c", + "sha256:e2cfcbab37c082a5087aba5ff00209999053260441caadd4f0e8f4c2d6b72088", + "sha256:e742f1f5dcaf222e8471c37ee3d1fd561568a16bb52e031c25674ff1cf9702d5", + "sha256:f06819b028b8ef9010281e74c59cb35483933583043091ed6b261bb1540f11cc", + "sha256:f15f2d60a11c306de7700ee9f65df7e9e463848dbea9c8051e293b704038da60", + "sha256:f31338ee269d201abe76083a990905473987371ff6f3fdb76a3f9073a361cf37", + "sha256:f6b88c8d302c3dac8dff7766955e38d670c82e0d79edfc7eae47d6bb2c186594" ], "index": "pypi", - "version": "==1.1.0" + "version": "==1.2.1" }, "singledispatch": { "hashes": [ @@ -518,10 +523,10 @@ }, "six": { "hashes": [ - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", + "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], - "version": "==1.11.0" + "version": "==1.12.0" }, "sklearn": { "hashes": [ @@ -571,14 +576,5 @@ "version": "==0.1.7" } }, - "develop": { - "v": { - "hashes": [ - "sha256:605a936bb41413ea696d92acc74dcf1a1eadde03e73b13655591c2b9074b9dd8", - "sha256:c2d77319519fc8b48bafd0677a9117d2717d266fa8a7512ad2158a6a40b31c01" - ], - "index": "pypi", - "version": "==4.0.0" - } - } + "develop": {} } From 31ba13f10a066d975ae21dc6daa4023ed5da87a0 Mon Sep 17 00:00:00 2001 From: xinyan Date: Tue, 19 Feb 2019 23:37:35 +0800 Subject: [PATCH 39/84] Fix bug with maxAnalyzedCommits in analyzer2. 
--- persper/analytics/analyzer2.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 06a62096051..f8e3a5626d7 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -110,8 +110,9 @@ def printCommitStatus(status: str): # note the commit # here only indicates the ordinal of current commit in current analysis session print("Commit #{0} {1} ({2}): {3}".format(analyzedCommits, commit.hexsha, message, status)) - if maxAnalyzedCommits and analyzedCommits > maxAnalyzedCommits: + if maxAnalyzedCommits and analyzedCommits >= maxAnalyzedCommits: print("Max analyzed commits reached.") + break if commit.hexsha in self._visitedCommits: printCommitStatus("Already visited.") continue From e0ddf2b95c7577c1b3d2e2b10145b87731f65e00 Mon Sep 17 00:00:00 2001 From: xinyan Date: Wed, 20 Feb 2019 00:01:10 +0800 Subject: [PATCH 40/84] Fix unit test baseline of cpp_test_repo. --- persper/analytics/lsp_graph_server/README.md | 7 +- .../baseline/cpp_test_repo/A.g.json | 182 +++++++++++----- .../baseline/cpp_test_repo/B.g.json | 196 ++++++++++++----- .../baseline/cpp_test_repo/C.g.json | 201 ++++++++++++----- .../baseline/cpp_test_repo/D.g.json | 206 +++++++++++++----- 5 files changed, 580 insertions(+), 212 deletions(-) diff --git a/persper/analytics/lsp_graph_server/README.md b/persper/analytics/lsp_graph_server/README.md index 91e01216e45..df961faf602 100644 --- a/persper/analytics/lsp_graph_server/README.md +++ b/persper/analytics/lsp_graph_server/README.md @@ -18,7 +18,12 @@ To try out the graph server backed by LSP, especially the [ccls](https://github. * Open a shell under `/test/test_analytics`, run ```powershell + # run all of the tests pipenv run pytest test_analyzer_lsp_ccls.py + # or run a single test + pipenv run pytest test_analyzer_lsp_ccls.py::testFeatureBranch ``` - * The test results are compared against baseline (by commit) in `/test/test_analytics/baseline`. 
\ No newline at end of file + * The test results are compared against baseline (by commit) in `/test/test_analytics/baseline`. + + * If there are assertion errors during testing, you can see the actual run result in `/test/test_analytics/actualdump`. \ No newline at end of file diff --git a/test/test_analytics/baseline/cpp_test_repo/A.g.json b/test/test_analytics/baseline/cpp_test_repo/A.g.json index 2bd1b9aacb1..019e2272079 100644 --- a/test/test_analytics/baseline/cpp_test_repo/A.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/A.g.json @@ -1,50 +1,102 @@ { "edges": { - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "A", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "A", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + "ConfigurationParser::ConfigurationParser(std::istream &inputStream)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const 
type_info &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|enum class 
StringComparison : int {}": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": "A", + "weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "addedBy": "A", + "weight": null + }, + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": "A", + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "addedBy": "A", + "weight": null + }, + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "addedBy": "A", + "weight": null + }, + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const 
std::string &defaultValue) const": { "addedBy": "A", "weight": null }, @@ -52,7 +104,15 @@ "addedBy": "A", "weight": null }, - "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "int main(int argc, char *argv[])|->|bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "addedBy": "A", + "weight": null + }, + "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "A", "weight": null } @@ -106,33 +166,51 @@ }, "size": null }, + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "history": { + "A": 5 + }, + "size": null + }, + "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "history": { + "A": 4 + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "A": 1 + "A": 25 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "A": 1 + "A": 7 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "history": { + "A": 17 }, "size": null }, @@ -142,6 +220,18 @@ }, "size": null }, + "TEnum operator&(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, + "TEnum operator|(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { "A": 1 @@ -172,7 +262,7 @@ }, "size": 
null }, - "_RangeToEnumerable RangeToEnumerable(const int range)": { + "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "history": { "A": 1 }, @@ -184,21 +274,21 @@ }, "size": null }, - "bool Confirm(const int &prompt)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { "history": { - "A": 17 + "A": 16 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "bool Confirm(const std::string &prompt)": { "history": { - "A": 1 + "A": 18 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "history": { - "A": 29 + "A": 30 }, "size": null }, @@ -208,7 +298,7 @@ }, "size": null }, - "bool pointer_kind_of(const int obj)": { + "bool pointer_kind_of(const std::shared_ptr obj)": { "history": { "A": 1 }, @@ -238,67 +328,61 @@ }, "size": null }, - "enum class StringComparison : int {}": { + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "history": { - "A": 6 + "A": 12 }, "size": null }, - "inline int to_string(const int &value)": { + "enum class StringComparison : int {}": { "history": { - "A": 4 + "A": 6 }, "size": null }, - "int &operator>>(int &reader, bool &rhs)": { + "inline std::string to_string(const std::pair &value)": { "history": { - "A": 24 + "A": 4 }, "size": null }, - "int &operator>>(int &reader, double &rhs)": { + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { "history": { - "A": 6 + "A": 12 }, "size": null }, - "int &operator>>(int &reader, float &rhs)": { + "int main(int argc, char *argv[])": { "history": { - "A": 6 + "A": 7 }, "size": null }, - "int &operator>>(int &reader, int &rhs)": { + "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "history": { - "A": 6 + "A": 1 }, "size": null }, - "int &operator>>(int &reader, long &rhs)": 
{ + "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "history": { "A": 6 }, "size": null }, - "int StreamStatusToString(const TStream &stream)": { + "std::string StreamStatusToString(const TStream &stream)": { "history": { "A": 1 }, "size": null }, - "int main(int argc, char *argv[])": { + "void ConfigurationParser::Load(std::istream &inputStream)": { "history": { - "A": 7 - }, - "size": null - }, - "int safe_pointer_cast(const int &obj)": { - "history": { - "A": 1 + "A": 23 }, "size": null }, - "void ReportException(const int &ex, int level)": { + "void ReportException(const std::exception &ex, int level)": { "history": { "A": 21 }, diff --git a/test/test_analytics/baseline/cpp_test_repo/B.g.json b/test/test_analytics/baseline/cpp_test_repo/B.g.json index 2d4d789f1b3..f504ab4e582 100644 --- a/test/test_analytics/baseline/cpp_test_repo/B.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/B.g.json @@ -1,50 +1,102 @@ { "edges": { - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "A", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "A", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { - "addedBy": "B", + "ConfigurationParser::ConfigurationParser(std::istream &inputStream)|->|void ConfigurationParser::Load(std::istream &inputStream)": { + "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char 
*FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "A", + "weight": null + }, + "bool ConfigurationParser::GetBool(const 
std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "A", + "weight": null + }, + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|enum class StringComparison : int {}": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { + "addedBy": "B", + "weight": null + }, + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "B", + "weight": null + }, + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": "B", + "weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "double 
ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "A", "weight": null }, - "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "A", "weight": null }, @@ -52,11 +104,27 @@ "addedBy": "B", "weight": null }, - "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { + "int main(int argc, char *argv[])|->|bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "addedBy": "B", + "weight": null + }, + "int main(int argc, char *argv[])|->|double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "addedBy": "B", + "weight": null + }, + "int main(int argc, char *argv[])|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "B", "weight": null }, - "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "int main(int argc, char *argv[])|->|void ReportException(const std::exception &ex, int level)": { + "addedBy": "B", + "weight": null + }, + "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "A", + "weight": null + }, + "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "B", "weight": null } @@ -110,33 +178,51 @@ }, "size": null }, + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "history": { + "A": 5 + }, + "size": null + }, + 
"ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "history": { + "A": 4 + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "A": 1 + "A": 25 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "A": 1 + "A": 7 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "history": { + "A": 17 }, "size": null }, @@ -146,6 +232,18 @@ }, "size": null }, + "TEnum operator&(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, + "TEnum operator|(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { "A": 1 @@ -176,7 +274,7 @@ }, "size": null }, - "_RangeToEnumerable RangeToEnumerable(const int range)": { + "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "history": { "A": 1 }, @@ -188,21 +286,21 @@ }, "size": null }, - "bool Confirm(const int &prompt)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { "history": { - "A": 17 + "A": 16 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "bool Confirm(const std::string &prompt)": { "history": { - "A": 1 + "A": 18 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "history": { - "A": 29 + "A": 30 }, "size": null }, @@ -212,7 +310,7 @@ }, "size": null }, - "bool pointer_kind_of(const int obj)": { + "bool pointer_kind_of(const 
std::shared_ptr obj)": { "history": { "A": 1 }, @@ -242,68 +340,62 @@ }, "size": null }, - "enum class StringComparison : int {}": { + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "history": { - "A": 6 + "A": 12 }, "size": null }, - "inline int to_string(const int &value)": { + "enum class StringComparison : int {}": { "history": { - "A": 4 + "A": 6 }, "size": null }, - "int &operator>>(int &reader, bool &rhs)": { + "inline std::string to_string(const std::pair &value)": { "history": { - "A": 24 + "A": 4 }, "size": null }, - "int &operator>>(int &reader, double &rhs)": { + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { "history": { - "A": 6 + "A": 12 }, "size": null }, - "int &operator>>(int &reader, float &rhs)": { + "int main(int argc, char *argv[])": { "history": { - "A": 6 + "A": 7, + "B": 4 }, "size": null }, - "int &operator>>(int &reader, int &rhs)": { + "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "history": { - "A": 6 + "A": 1 }, "size": null }, - "int &operator>>(int &reader, long &rhs)": { + "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "history": { "A": 6 }, "size": null }, - "int StreamStatusToString(const TStream &stream)": { + "std::string StreamStatusToString(const TStream &stream)": { "history": { "A": 1 }, "size": null }, - "int main(int argc, char *argv[])": { + "void ConfigurationParser::Load(std::istream &inputStream)": { "history": { - "A": 7, - "B": 4 - }, - "size": null - }, - "int safe_pointer_cast(const int &obj)": { - "history": { - "A": 1 + "A": 23 }, "size": null }, - "void ReportException(const int &ex, int level)": { + "void ReportException(const std::exception &ex, int level)": { "history": { "A": 21 }, diff --git a/test/test_analytics/baseline/cpp_test_repo/C.g.json b/test/test_analytics/baseline/cpp_test_repo/C.g.json index 4d25d6e1f11..88834d24b6f 100644 --- 
a/test/test_analytics/baseline/cpp_test_repo/C.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/C.g.json @@ -1,50 +1,102 @@ { "edges": { - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "C", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "C", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + "ConfigurationParser::ConfigurationParser(std::istream &inputStream)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "C", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { "addedBy": "C", "weight": null }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "C", "weight": null }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double 
&rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "C", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "addedBy": "C", + "weight": null + }, + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "C", + "weight": null + }, + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|enum class StringComparison : int {}": { "addedBy": "C", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "C", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { "addedBy": "C", "weight": null }, - 
"int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "C", "weight": null }, - "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { "addedBy": "C", "weight": null }, - "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { "addedBy": "C", "weight": null }, - "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": "C", + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "addedBy": "C", + "weight": null + }, + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "addedBy": "C", + "weight": null + }, + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "C", "weight": null }, @@ -52,11 +104,27 @@ "addedBy": "C", "weight": null }, - "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { + "int main(int argc, char *argv[])|->|bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "addedBy": "C", + "weight": null + }, + "int main(int argc, char *argv[])|->|double 
ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "addedBy": "B", + "weight": null + }, + "int main(int argc, char *argv[])|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "B", "weight": null }, - "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "int main(int argc, char *argv[])|->|void ReportException(const std::exception &ex, int level)": { + "addedBy": "B", + "weight": null + }, + "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "C", + "weight": null + }, + "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "C", "weight": null } @@ -110,33 +178,52 @@ }, "size": null }, + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "history": { + "A": 5 + }, + "size": null + }, + "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "history": { + "A": 4 + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "A": 1 + "A": 25, + "C": 4 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "A": 1 + "A": 7 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "history": { + "A": 17 }, "size": null }, @@ -146,6 +233,18 @@ }, "size": null }, + "TEnum operator&(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, + "TEnum operator|(TEnum lhs, TEnum 
rhs)": { + "history": { + "A": 1 + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { "A": 1 @@ -176,7 +275,7 @@ }, "size": null }, - "_RangeToEnumerable RangeToEnumerable(const int range)": { + "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "history": { "A": 1 }, @@ -188,21 +287,22 @@ }, "size": null }, - "bool Confirm(const int &prompt)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { "history": { - "A": 17 + "A": 16, + "C": 4 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "bool Confirm(const std::string &prompt)": { "history": { - "A": 1 + "A": 18 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "history": { - "A": 29, + "A": 30, "C": 2 }, "size": null @@ -213,7 +313,7 @@ }, "size": null }, - "bool pointer_kind_of(const int obj)": { + "bool pointer_kind_of(const std::shared_ptr obj)": { "history": { "A": 1 }, @@ -243,71 +343,64 @@ }, "size": null }, - "enum class StringComparison : int {}": { + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "history": { - "A": 6, - "C": 2 + "A": 12 }, "size": null }, - "inline int to_string(const int &value)": { + "enum class StringComparison : int {}": { "history": { - "A": 4 + "A": 6, + "C": 2 }, "size": null }, - "int &operator>>(int &reader, bool &rhs)": { + "inline std::string to_string(const std::pair &value)": { "history": { - "A": 24, - "C": 4 + "A": 4 }, "size": null }, - "int &operator>>(int &reader, double &rhs)": { + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { "history": { - "A": 6 + "A": 12 }, "size": null }, - "int &operator>>(int &reader, float &rhs)": { + "int main(int argc, char *argv[])": { "history": { - "A": 6 + "A": 7, + "B": 4, + "C": 
4 }, "size": null }, - "int &operator>>(int &reader, int &rhs)": { + "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "history": { - "A": 6 + "A": 1 }, "size": null }, - "int &operator>>(int &reader, long &rhs)": { + "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "history": { "A": 6 }, "size": null }, - "int StreamStatusToString(const TStream &stream)": { + "std::string StreamStatusToString(const TStream &stream)": { "history": { "A": 1 }, "size": null }, - "int main(int argc, char *argv[])": { - "history": { - "A": 7, - "B": 4, - "C": 4 - }, - "size": null - }, - "int safe_pointer_cast(const int &obj)": { + "void ConfigurationParser::Load(std::istream &inputStream)": { "history": { - "A": 1 + "A": 23 }, "size": null }, - "void ReportException(const int &ex, int level)": { + "void ReportException(const std::exception &ex, int level)": { "history": { "A": 21 }, diff --git a/test/test_analytics/baseline/cpp_test_repo/D.g.json b/test/test_analytics/baseline/cpp_test_repo/D.g.json index 74f9d08d417..3e271ea882d 100644 --- a/test/test_analytics/baseline/cpp_test_repo/D.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/D.g.json @@ -1,50 +1,102 @@ { "edges": { - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "D", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "D", "weight": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)|->|enum class StringComparison : int {}": { + 
"ConfigurationParser::ConfigurationParser(std::istream &inputStream)|->|void ConfigurationParser::Load(std::istream &inputStream)": { "addedBy": "D", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { "addedBy": "D", "weight": null }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "D", "weight": null }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "D", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "addedBy": "D", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "D", + "weight": null + }, + "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "D", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|bool Equal(const int &lhs, const int &rhs, int comparision)": { + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { "addedBy": "D", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|enum class StringComparison : int {}": { + "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "D", "weight": null }, - "int &operator>>(int &reader, bool &rhs)|->|int &operator>>(int &reader, int &rhs)": { + 
"RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "D", "weight": null }, - "int &operator>>(int &reader, double &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { "addedBy": "D", "weight": null }, - "int &operator>>(int &reader, float &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { "addedBy": "D", "weight": null }, - "int &operator>>(int &reader, long &rhs)|->|int &operator>>(int &reader, int &rhs)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "D", + "weight": null + }, + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|enum class StringComparison : int {}": { + "addedBy": "D", + "weight": null + }, + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "addedBy": "D", + "weight": null + }, + "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { + "addedBy": "D", + "weight": null + }, + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "addedBy": "D", + "weight": null + }, + "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { + "addedBy": "D", + "weight": null + }, + "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { + 
"addedBy": "D", + "weight": null + }, + "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { + "addedBy": "D", + "weight": null + }, + "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { + "addedBy": "D", + "weight": null + }, + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "addedBy": "D", + "weight": null + }, + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "addedBy": "D", "weight": null }, @@ -52,11 +104,27 @@ "addedBy": "C", "weight": null }, - "int main(int argc, char *argv[])|->|void ReportException(const int &ex, int level)": { + "int main(int argc, char *argv[])|->|bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "addedBy": "C", + "weight": null + }, + "int main(int argc, char *argv[])|->|double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "addedBy": "B", + "weight": null + }, + "int main(int argc, char *argv[])|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "addedBy": "B", + "weight": null + }, + "int main(int argc, char *argv[])|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "B", "weight": null }, - "void ReportException(const int &ex, int level)|->|void ReportException(const int &ex, int level)": { + "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { + "addedBy": "D", + "weight": null + }, + "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "D", "weight": 
null } @@ -110,33 +178,53 @@ }, "size": null }, + "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "history": { + "A": 5 + }, + "size": null + }, + "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "history": { + "A": 4 + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "history": { - "A": 1 + "A": 25, + "C": 4, + "D": 4 }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "history": { - "A": 1 + "A": 7 }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "history": { - "A": 1 + "A": 7 + }, + "size": null + }, + "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "history": { + "A": 17 }, "size": null }, @@ -146,6 +234,18 @@ }, "size": null }, + "TEnum operator&(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, + "TEnum operator|(TEnum lhs, TEnum rhs)": { + "history": { + "A": 1 + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "history": { "A": 1 @@ -176,7 +276,7 @@ }, "size": null }, - "_RangeToEnumerable RangeToEnumerable(const int range)": { + "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "history": { "A": 1 }, @@ -188,21 +288,23 @@ }, "size": null }, - "bool Confirm(const int &prompt)": { + "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { "history": { - "A": 17 + "A": 16, + "C": 4, + "D": 4 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, StringComparison comparision)": { + "bool Confirm(const std::string &prompt)": { "history": { - "A": 1 + "A": 18 }, "size": null }, - "bool Equal(const int &lhs, const int &rhs, int comparision)": { + "bool Equal(const std::string &lhs, const 
std::string &rhs, StringComparison comparision)": { "history": { - "A": 29, + "A": 30, "C": 2, "D": 2 }, @@ -214,7 +316,7 @@ }, "size": null }, - "bool pointer_kind_of(const int obj)": { + "bool pointer_kind_of(const std::shared_ptr obj)": { "history": { "A": 1 }, @@ -244,6 +346,12 @@ }, "size": null }, + "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "history": { + "A": 12 + }, + "size": null + }, "enum class StringComparison : int {}": { "history": { "A": 6, @@ -252,66 +360,52 @@ }, "size": null }, - "inline int to_string(const int &value)": { + "inline std::string to_string(const std::pair &value)": { "history": { "A": 4 }, "size": null }, - "int &operator>>(int &reader, bool &rhs)": { + "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { "history": { - "A": 24, - "C": 4, - "D": 4 + "A": 12 }, "size": null }, - "int &operator>>(int &reader, double &rhs)": { - "history": { - "A": 6 - }, - "size": null - }, - "int &operator>>(int &reader, float &rhs)": { + "int main(int argc, char *argv[])": { "history": { - "A": 6 + "A": 7, + "B": 4, + "C": 4, + "D": 7 }, "size": null }, - "int &operator>>(int &reader, int &rhs)": { + "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "history": { - "A": 6 + "A": 1 }, "size": null }, - "int &operator>>(int &reader, long &rhs)": { + "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { "history": { "A": 6 }, "size": null }, - "int StreamStatusToString(const TStream &stream)": { + "std::string StreamStatusToString(const TStream &stream)": { "history": { "A": 1 }, "size": null }, - "int main(int argc, char *argv[])": { + "void ConfigurationParser::Load(std::istream &inputStream)": { "history": { - "A": 7, - "B": 4, - "C": 4, - "D": 7 - }, - "size": null - }, - "int safe_pointer_cast(const int &obj)": { - "history": { - "A": 1 + "A": 23 }, "size": null }, - "void 
ReportException(const int &ex, int level)": { + "void ReportException(const std::exception &ex, int level)": { "history": { "A": 21 }, From 19c1b766286afb5d14328a65674ac765d88be5bb Mon Sep 17 00:00:00 2001 From: xinyan Date: Wed, 20 Feb 2019 23:41:44 +0800 Subject: [PATCH 41/84] Add docs in graph_server.py. --- persper/analytics/graph_server.py | 134 +++++++++++++++++++++++++++--- 1 file changed, 121 insertions(+), 13 deletions(-) diff --git a/persper/analytics/graph_server.py b/persper/analytics/graph_server.py index d685b49ad1d..f70f1415df3 100644 --- a/persper/analytics/graph_server.py +++ b/persper/analytics/graph_server.py @@ -1,6 +1,7 @@ from abc import ABC from abc import abstractmethod -from enum import Enum +from aenum import Enum +from persper.analytics.call_commit_graph import CallCommitGraph JS_FILENAME_REGEXES = [ r'.+\.js$', @@ -28,17 +29,96 @@ class CommitSeekingMode(Enum): - NormalForward = 0 - MergeCommit = 1 - Rewind = 2 + """ + Describes how `Analyzer` has reached the current commit. + """ + _init_ = "value __doc__" + NormalForward = 0, """ + The current commit has been reached because `Analyzer` is going to analyze this commit. + `GraphServer` implementation should update its working tree according to the subsequent + `update_graph` calls, as well as the call commit graph preserved inside `GraphServer`. + """ + MergeCommit = 1, """ + The current commit has been reached because `Analyzer` is going to analyze this commit. + However, the current commit is a merge commit. Some commit graph traits might be + updated differently from NormalForward case; for example, we still update edges, + but we don't update node history in this commit. + """ + Rewind = 2, """ + The current commit has been reached because `Analyzer` is tracing back (or more generally, "jumping") + to the parent commit (A^) of certain commit (A). Usually there should be no changes to the commit graph + preserved in the GraphServer during going through the diff of this commit. 
But GraphServer should still update + its workspace tree accordingly, because the next commit shall be the "certain commit" (A) to be analyzed + either as `NormalForward` or `MergeCommit`. + To ensure GraphServer can correctly obtain the file change information, Analyzer + will go to its parent commit (A^) first. + """ class GraphServer(ABC): + r""" + Provides implementation-specific ability to build call commit graph via + some or all of the commits in a repository. + + `analyzer2.Analyzer` is the consumer of this class. It will ensure the methods are called + in the following order: + + ``` + lastCommit = EMPTY_TREE_SHA + for commit in commits: + start_commit(commit) + for oldFileName, fileName, fileDiff in compareCommit(lastCommit, commit): + filter_file(oldFileName) + filter_file(fileName) + update_graph(oldFileName, fileName, fileDiff) + end_commit(commit) + lastCommit = commit + ``` + + `Analyzer` will visit a range of commits in a certain order (though it's usually + topology order because we may avoid as many Rewinds as possible), and it will + indicate how it has reached the current commit in `start_commit`. Because the + commit history is not linear, we may sometimes need to move backwards (rewind) + in the commit tree. In that case, we need to ensure `GraphServer` can always + get the correct file diff from __parent__ commit. 
For example, in a commit tree + like this + ``` + A -- B -- C -- D -- E -- F + \ -- a -- b -- / + ``` + where `master` == `F`, `Analyzer` may visit the commits in the following order + * A (NormalForward) + * B (NormalForward) + * C (NormalForward) + * D (NormalForward) + * B (Rewind) + * a (NormalForward) + * b (NormalForward) + * E (MergeCommit) + * F (NormalForward) + + When implementing this class, you should ensure to + * Update both the edge (call relation) and node history for `NormalForward` commits + * Update edge but keep node history untouched for `MergeCommit` commits + * Do not update call commit graph for `Rewind` commits + + The existence of `Rewind` commits is only to simplify the node history generation + for `GraphServer`, because node history means the diff to the __parent__ commit. + + If any error is raised in any of the implementation methods, the status of + `GraphServer` will be unspecified. It's suggested that the consumer of `GraphServer` + create a new instance of it. + + When overriding this class, some of the methods may be implemented either as + synchronous or asynchronous (with `asyncio`, or `async def`). You will find the + note on the respective methods. + """ def register_commit(self, hexsha, author_name, author_email, commit_message): """ - Deprecated. Use start_commit instead. + Deprecated. Do not override this method. + Override start_commit instead. 
:return: a status code, success or failure """ raise NotImplementedError() @@ -47,27 +127,55 @@ def register_commit(self, hexsha, author_name, author_email, def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): """ - Update the graph with a single-file patch - :param old_filename: the path to a file that the commit modifies - :param old_src: the source code of the file before the commit - :param new_filename: the path to the file after the commit - :param new_src: the source code of the file after the commit - :param patch: the raw patch generated by GitPython diff - :return a status code + Notifies `GraphServer` a file has been changed in this commit. + params + :param old_filename: the path to a file that the commit modifies + :param old_src: the source code of the file before the commit + :param new_filename: the path to the file after the commit + :param new_src: the source code of the file after the commit + :param patch: the raw patch generated by GitPython diff + + remarks + This method can be implemented as async method. + The name of this function is kept for backward-compatibility. + It's up to implementation to decide whether to update the + call commit graph on the fly, or to only make necessary work + tree modifications in this method, and update the call commit + graph in whole in `end_commit` method. You may also choose to + update some part of the graph in `update_graph`, and the rest + in `end_commit`. """ pass def start_commit(self, hexsha: str, seeking_mode: CommitSeekingMode, author_name: str, author_email: str, commit_message: str): + """ + Called when the `Analyzer` has reached a new commit. + params + hexsha hex SHA of the commit. + seeking_mode describes how this commit has been reached. + See CommitSeekingMode for more details on the meaning of each value. + commit_message commit summary. 
+ + remarks + When implementing this method, you might want to preserve `seeking_mode` as a class field + so you may have access to this value in `update_graph` & `end_commit` implementations. + """ # default implementation for backwards compatibility if seeking_mode == CommitSeekingMode.NormalForward: self.register_commit(hexsha, author_name, author_email, commit_message) def end_commit(self, hexsha: str): + """ + Called when the `Analyzer` is going to leave this commit. + + remarks + This method can be implemented as async method. + """ pass @abstractmethod - def get_graph(self): + def get_graph(self) -> CallCommitGraph: """ Retrieve the graph :return: A CallCommitGraph object From 9996bd9c9784932a94cbbe847cbf3f124f8704e6 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 20 Feb 2019 10:06:52 -0800 Subject: [PATCH 42/84] Add aenum to dependencies --- Pipfile | 1 + Pipfile.lock | 73 +++++++++++++++++++++++++++++----------------------- 2 files changed, 42 insertions(+), 32 deletions(-) diff --git a/Pipfile b/Pipfile index b956277f56d..3829aaf7910 100644 --- a/Pipfile +++ b/Pipfile @@ -20,6 +20,7 @@ matplotlib = "*" antlr4-python3-runtime = "*" python-jsonrpc-server = "==0.0.2" pytest-asyncio = "*" +aenum = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index b77ca0f4828..0b4ea397982 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "891f5ac4e93119d7cf21aa2fbae2258c92280cd7e32e9b5630726d999c8a6b3e" + "sha256": "a347c42c8dbb2a243f5b89229aa7069d8421023259c6c764de0cfcd86ca8c512" }, "pipfile-spec": 6, "requires": { @@ -16,6 +16,15 @@ ] }, "default": { + "aenum": { + "hashes": [ + "sha256:3df9b84cce5dc9ed77c337079f97b66c44c0053eb87d6f4d46b888dc45801e38", + "sha256:7a77c205c4bc9d7fe9bd73b3193002d724aebf5909fa0d297534208953891ec8", + "sha256:a3208e4b28db3a7b232ff69b934aef2ea1bf27286d9978e1e597d46f490e4687" + ], + "index": "pypi", + "version": "==2.1.2" + }, "antlr4-python3-runtime": { "hashes": [ 
"sha256:168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b" @@ -337,11 +346,11 @@ }, "prompt-toolkit": { "hashes": [ - "sha256:88002cc618cacfda8760c4539e76c3b3f148ecdb7035a3d422c7ecdc90c2a3ba", - "sha256:c6655a12e9b08edb8cf5aeab4815fd1e1bdea4ad73d3bbf269cf2e0c4eb75d5e", - "sha256:df5835fb8f417aa55e5cafadbaeb0cf630a1e824aad16989f9f0493e679ec010" + "sha256:11adf3389a996a6d45cc277580d0d53e8a5afd281d0c9ec71b28e6f121463780", + "sha256:2519ad1d8038fd5fc8e770362237ad0364d16a7650fb5724af6997ed5515e3c1", + "sha256:977c6583ae813a37dc1c2e1b715892461fcbdaa57f6fc62f33a528c4886c8f55" ], - "version": "==2.0.8" + "version": "==2.0.9" }, "ptyprocess": { "hashes": [ @@ -411,33 +420,33 @@ }, "pyzmq": { "hashes": [ - "sha256:25a0715c8f69cf72f67cfe5a68a3f3ed391c67c063d2257bec0fe7fc2c7f08f8", - "sha256:2bab63759632c6b9e0d5bf19cc63c3b01df267d660e0abcf230cf0afaa966349", - "sha256:30ab49d99b24bf0908ebe1cdfa421720bfab6f93174e4883075b7ff38cc555ba", - "sha256:32c7ca9fc547a91e3c26fc6080b6982e46e79819e706eb414dd78f635a65d946", - "sha256:41219ae72b3cc86d97557fe5b1ef5d1adc1057292ec597b50050874a970a39cf", - "sha256:4b8c48a9a13cea8f1f16622f9bd46127108af14cd26150461e3eab71e0de3e46", - "sha256:55724997b4a929c0d01b43c95051318e26ddbae23565018e138ae2dc60187e59", - "sha256:65f0a4afae59d4fc0aad54a917ab599162613a761b760ba167d66cc646ac3786", - "sha256:6f88591a8b246f5c285ee6ce5c1bf4f6bd8464b7f090b1333a446b6240a68d40", - "sha256:75022a4c60dcd8765bb9ca32f6de75a0ec83b0d96e0309dc479f4c7b21f26cb7", - "sha256:76ea493bfab18dcb090d825f3662b5612e2def73dffc196d51a5194b0294a81d", - "sha256:7b60c045b80709e4e3c085bab9b691e71761b44c2b42dbb047b8b498e7bc16b3", - "sha256:8e6af2f736734aef8ed6f278f9f552ec7f37b1a6b98e59b887484a840757f67d", - "sha256:9ac2298e486524331e26390eac14e4627effd3f8e001d4266ed9d8f1d2d31cce", - "sha256:9ba650f493a9bc1f24feca1d90fce0e5dd41088a252ac9840131dfbdbf3815ca", - "sha256:a02a4a385e394e46012dc83d2e8fd6523f039bb52997c1c34a2e0dd49ed839c1", - 
"sha256:a3ceee84114d9f5711fa0f4db9c652af0e4636c89eabc9b7f03a3882569dd1ed", - "sha256:a72b82ac1910f2cf61a49139f4974f994984475f771b0faa730839607eeedddf", - "sha256:ab136ac51027e7c484c53138a0fab4a8a51e80d05162eb7b1585583bcfdbad27", - "sha256:c095b224300bcac61e6c445e27f9046981b1ac20d891b2f1714da89d34c637c8", - "sha256:c5cc52d16c06dc2521340d69adda78a8e1031705924e103c0eb8fc8af861d810", - "sha256:d612e9833a89e8177f8c1dc68d7b4ff98d3186cd331acd616b01bbdab67d3a7b", - "sha256:e828376a23c66c6fe90dcea24b4b72cd774f555a6ee94081670872918df87a19", - "sha256:e9767c7ab2eb552796440168d5c6e23a99ecaade08dda16266d43ad461730192", - "sha256:ebf8b800d42d217e4710d1582b0c8bff20cdcb4faad7c7213e52644034300924" - ], - "version": "==17.1.2" + "sha256:07a03450418694fb07e76a0191b6bc9f411afc8e364ca2062edcf28bb0e51c63", + "sha256:15f0bf7cd80020f165635595e197603aedb37fddf4164ad5ae226afc43242f7b", + "sha256:1756dc72e192c670490e38c788c3a35f901adc74ee436e5131d5a3e85fdd7dc6", + "sha256:1d1eb490da54679d724b08ef3ee04530849023670c4ba7e400ed2cdf906720c4", + "sha256:228402625796821f08706f58cc42a3c51c9897d723550babaefe4feec2b6dacc", + "sha256:264ac9dcee6a7af2bce4b61f2d19e5926118a5caa629b50f107ef6318670a364", + "sha256:2b5a43da65f5dec857184d5c2ce13b80071019e96358f146bdecff7238765bc9", + "sha256:3928534fa00a2aabfcfdb439c08ba37fbe99ab0cf57776c8db8d2b73a51693ba", + "sha256:3d2a295b1086d450981f73d3561ac204a0cc9c8ded386a4a34327d918f3b1d0a", + "sha256:411def5b4cbe6111856040a55c8048df113882e90c57ce88de4a48f0189441ac", + "sha256:4b77e96a7ffc1c5e08eaf274db554f227b31717d086adca1bb42b12ef35a7194", + "sha256:4c87fa3e449e1f4ab9170cdfe8213dc0ba34a11b160e6adecafa892e451a29b6", + "sha256:4fd8621a309db6ec23ef1369f43cdf7a9b0dc217d8ff9ca4095a6e932b379bda", + "sha256:54fe55a1694ffe608c8e4c5183e83cab7a91f3e5c84bd6f188868d6676c12aba", + "sha256:60acabd86808a16a895a247fd2bf7a127284a33562d79687bb5df163cff068b2", + "sha256:618887be4ad754228c0cbba7631f6574608b4430fe93974e6322324f1304fdac", + 
"sha256:69130efb6efa936de601cb135a8a4eec1caccd4ea2b784237145ff4075c2d3ae", + "sha256:6e7f78eeac82140bde7e60e975c6e6b1b678a4dd377782ab63319c1c78bf3aa1", + "sha256:6ee760cdb84e43574da6b3f2f1fc1251e8acf87253900d28a06451c5f5de39e9", + "sha256:75c87f1dc1e65cea4b709f2ebc78fa18d4b475e41463502aec9cd26208b88e0f", + "sha256:97cb1b7cd2c46e87b0a26651eccd2bbb8c758035efd1635ebb81ac36aa76a88c", + "sha256:abfa774dbadacc849121ed92eae05189d226daab583388b499472e1bbb17ef69", + "sha256:ae3d2627d74195ddc95675f2f814aca998381b73dc4341b9e10e3e191e1bdb0b", + "sha256:b30c339eb58355f51f4f54dd61d785f1ff58c86bca1c3a5916977631d121867b", + "sha256:cbabdced5b137cd56aa22633f13ac5690029a0ad43ab6c05f53206e489178362" + ], + "version": "==18.0.0" }, "requests": { "hashes": [ From bf4851cfb105ecb806ad7422b29377fb43460f14 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 20 Feb 2019 11:12:04 -0800 Subject: [PATCH 43/84] Fix the bug of using _cur_index() when adding a new commit --- persper/analytics/call_commit_graph.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 967ccb805f3..4f1bb89de61 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -52,7 +52,7 @@ def commits(self): return self._digraph.graph['commitList'] def add_commit(self, hexsha, author_name, author_email, commit_message): - self._current_commit_id = self._commit_id_generator(self._cur_cindex(), hexsha, commit_message) + self._current_commit_id = self._commit_id_generator(self._next_cindex(), hexsha, commit_message) self._digraph.graph['commitList'].append({ 'id': self._current_commit_id, 'hexsha': hexsha, 'authorName': author_name, @@ -63,6 +63,9 @@ def add_commit(self, hexsha, author_name, author_email, commit_message): def _cur_cindex(self): return len(self.commits()) - 1 + def _next_cindex(self): + return self._cur_cindex() + 1 + def reset(self): self._digraph = 
nx.DiGraph(commitList=[]) From 68e2561aa56e5515a70d2b2b1d7a18e724e5f387 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 20 Feb 2019 11:13:36 -0800 Subject: [PATCH 44/84] Fix outdated tests for awaiting for Analyzer.analyze --- test/test_analytics/test_analyzer.py | 5 +++-- test/test_analytics/test_call_commit_graph.py | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/test/test_analytics/test_analyzer.py b/test/test_analytics/test_analyzer.py index a5921bd81b7..75c4c5b4a48 100644 --- a/test/test_analytics/test_analyzer.py +++ b/test/test_analytics/test_analyzer.py @@ -25,8 +25,9 @@ def az(): return Analyzer(repo_path, CGraphServer(C_FILENAME_REGEXES)) -def test_az_basic(az): - az.analyze(from_beginning=True) +@pytest.mark.asyncio +async def test_az_basic(az): + await az.analyze(from_beginning=True) ccgraph = az.get_graph() history_truth = { diff --git a/test/test_analytics/test_call_commit_graph.py b/test/test_analytics/test_call_commit_graph.py index 7a49b6f3dac..73cf4325b93 100644 --- a/test/test_analytics/test_call_commit_graph.py +++ b/test/test_analytics/test_call_commit_graph.py @@ -1,4 +1,5 @@ import os +import pytest import shutil import subprocess from math import isclose @@ -94,7 +95,8 @@ def test_call_commit_graph(): assert(isclose(dev_drs3[second_commit['authorEmail']], 0.201, rel_tol=1e-2)) -def test_black_set(): +@pytest.mark.asyncio +async def test_black_set(): """ The CRLF commit: https://github.com/bitcoin/bitcoin/commit/0a61b0df1224a5470bcddab302bc199ca5a9e356 Its parent: https://github.com/bitcoin/bitcoin/commit/5b721607b1057df4dfe97f80d235ed372312f398 @@ -109,7 +111,7 @@ def test_black_set(): crlf_sha = '0a61b0df1224a5470bcddab302bc199ca5a9e356' ggparent_sha = '7d7797b141dbd4ed9db1dda94684beb3395c2534' rev = ggparent_sha + '..' 
+ crlf_sha - az.analyze(rev=rev) + await az.analyze(rev=rev) ccgraph = az.get_graph() devdict = ccgraph.commit_devranks(0.85) devdict2 = ccgraph.commit_devranks(0.85, black_set=set([crlf_sha])) From a2db213d6e37503911df38ed43f8b4b2784a0636 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 20 Feb 2019 11:14:20 -0800 Subject: [PATCH 45/84] Ignore warnings in third party libraries --- test/pytest.ini | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 test/pytest.ini diff --git a/test/pytest.ini b/test/pytest.ini new file mode 100644 index 00000000000..10805d1b1ee --- /dev/null +++ b/test/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +filterwarnings = + ignore:inspect\.getargspec\(\) is deprecated:DeprecationWarning From 653582ada9108fd6c4f1560743388c732fcc2dbc Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 27 Feb 2019 17:23:26 -0800 Subject: [PATCH 46/84] Fix patch parser when there's more content after \No newline at the end of file --- persper/analytics/patch_parser.py | 3 ++ test/test_analytics/example5.patch | 9 ++++++ test/test_analytics/example6.patch | 38 +++++++++++++++++++++++ test/test_analytics/test_detect_change.py | 26 +++++++++++++--- 4 files changed, 71 insertions(+), 5 deletions(-) create mode 100644 test/test_analytics/example5.patch create mode 100644 test/test_analytics/example6.patch diff --git a/persper/analytics/patch_parser.py b/persper/analytics/patch_parser.py index e3d9e116d06..03094f614be 100644 --- a/persper/analytics/patch_parser.py +++ b/persper/analytics/patch_parser.py @@ -69,6 +69,9 @@ def parse(self, text): self.start_add() else: self.start_add() + elif line.startswith('\\'): + # Ignore \No newline at the end of file + pass else: # print("in blank") if self.in_add: diff --git a/test/test_analytics/example5.patch b/test/test_analytics/example5.patch new file mode 100644 index 00000000000..c698ffad768 --- /dev/null +++ b/test/test_analytics/example5.patch @@ -0,0 +1,9 @@ +--- patch_error.c 2019-02-27 16:20:31.000000000 -0800 
++++ new_patch_error.c 2019-02-27 16:15:41.000000000 -0800 +@@ -9,4 +9,4 @@ + } + if(*eqaul1 == '\0' && *eqaul2 == '\0' ){return 0;} + else {return -1}; +-} ++} +\ No newline at end of file diff --git a/test/test_analytics/example6.patch b/test/test_analytics/example6.patch new file mode 100644 index 00000000000..c85e4882871 --- /dev/null +++ b/test/test_analytics/example6.patch @@ -0,0 +1,38 @@ +diff --git a/main.go b/main.go +index 5398e6d..2a81399 100644 +--- a/main.go ++++ b/main.go +@@ -9,22 +9,20 @@ type animal interface { + + type cat int + type dog int +- + func (c cat) printInfo(){ + fmt.Println("a cat") + } + +-func (d dog) printInfo(){ ++func (c dog) printInfo(){ + fmt.Println("a dog") + } +- ++func invoke(a animal){ ++ a.printInfo() ++} + func main() { +- var a animal + var c cat +- a=c +- a.printInfo() +- //other type +- var d dog +- a=d +- a.printInfo() +-} +\ No newline at end of file ++ var d dog ++ //as value convert ++ invoke(c) ++ invoke(d) ++} + diff --git a/test/test_analytics/test_detect_change.py b/test/test_analytics/test_detect_change.py index b5f44694ff6..3d50dbbb429 100644 --- a/test/test_analytics/test_detect_change.py +++ b/test/test_analytics/test_detect_change.py @@ -47,11 +47,12 @@ def test_detect_change(): assert(changed_result == get_changed_functions( *func_ranges_result, *parsing_result)) + def test_patch_parser(): parser = PatchParser() patch2_truth = ( - [[0, 6]], + [[0, 6]], [] ) with open(os.path.join(dir_path, 'example2.patch'), 'r') as f: @@ -63,7 +64,7 @@ def test_patch_parser(): # https://github.com/UltimateBeaver/test_feature_branch/commit/caaac10f604ea7ac759c2147df8fb2b588ee2a27 patch3_truth = ( [[10, 4], [12, 1], [14, 1], [17, 13]], - [[9, 10], [12, 12], [14, 14]] + [[9, 10], [12, 12], [14, 14]] ) with open(os.path.join(dir_path, 'example3.patch'), 'r') as f: example3_patch = f.read() @@ -82,6 +83,21 @@ def test_patch_parser(): assert(parsing_result == patch4_truth) - - - +def test_no_newline_at_the_end_of_file(): + 
parser = PatchParser() + patch5_truth = ( + [[12, 1]], [[12, 12]] + ) + with open(os.path.join(dir_path, 'example5.patch'), 'r') as f: + example5_patch = f.read() + parsing_result = parser.parse(example5_patch) + assert(parsing_result == patch5_truth) + + patch6_truth = ( + [[17, 1], [20, 3], [30, 5]], + [[12, 12], [17, 17], [20, 20], [22, 22], [24, 30]] + ) + with open(os.path.join(dir_path, 'example6.patch'), 'r') as f: + example6_patch = f.read() + parsing_result = parser.parse(example6_patch) + assert(parsing_result == patch6_truth) From 50f8dbb7ea9e610caba99070e6264d0dc7a9ca4a Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 27 Feb 2019 17:24:49 -0800 Subject: [PATCH 47/84] Upgrade CallCommitGraph to v1.1 --- persper/analytics/c.py | 51 +++++++++--- persper/analytics/call_commit_graph.py | 77 +++++++++++------- persper/analytics/call_graph/c.py | 6 +- test/test_analytics/test_analyzer.py | 79 ++++++++++++++----- test/test_analytics/test_call_commit_graph.py | 10 +-- 5 files changed, 155 insertions(+), 68 deletions(-) diff --git a/persper/analytics/c.py b/persper/analytics/c.py index 2f5a8f29fa7..e3d26b5f0ec 100644 --- a/persper/analytics/c.py +++ b/persper/analytics/c.py @@ -8,6 +8,35 @@ from persper.analytics.call_commit_graph import CallCommitGraph +def function_change_stats(old_ast, new_ast, patch, patch_parser, ranges_func): + """ + Parse old/new source files and extract the change info for all functions + """ + adds, dels = patch_parser(patch) + + forward_stats = {} + bckward_stats = {} + + if old_ast is not None: + forward_stats = get_changed_functions( + *ranges_func(old_ast), adds, dels, separate=True) + + if new_ast is not None: + inv_adds, inv_dels = inverse_diff(adds, dels) + bckward_stats = get_changed_functions( + *ranges_func(new_ast), inv_adds, inv_dels, separate=True) + + # merge forward and backward stats + for func, fstat in bckward_stats.items(): + if func not in forward_stats: + forward_stats[func] = { + 'adds': fstat['dels'], + 
'dels': fstat['adds'] + } + + return forward_stats + + class CGraphServer(GraphServer): def __init__(self, filename_regex_strs): self._ccgraph = CallCommitGraph() @@ -21,30 +50,28 @@ def register_commit(self, hexsha, author_name, author_email, def update_graph(self, old_filename, old_src, new_filename, new_src, patch): ast_list = [] - forward_stats = {} - bckward_stats = {} - adds, dels = self._parse_patch(patch) + old_ast = None + new_ast = None + # Parse source codes into ASTs if old_src: old_ast = transform_src_to_tree(old_src) if old_ast is None: return -1 - forward_stats = get_changed_functions( - *get_func_ranges_c(old_ast), adds, dels) - if new_src: new_ast = transform_src_to_tree(new_src) if new_ast is None: return -1 - ast_list = [new_ast] - inv_adds, inv_dels = inverse_diff(adds, dels) - bckward_stats = get_changed_functions( - *get_func_ranges_c(new_ast), inv_adds, inv_dels) - bckward_stats.update(forward_stats) - update_graph(self._ccgraph, ast_list, bckward_stats) + # Compute function change stats + change_stats = function_change_stats(old_ast, new_ast, patch, + self._parse_patch, + get_func_ranges_c) + + # Update call-commit graph + update_graph(self._ccgraph, ast_list, change_stats) return 0 def get_graph(self): diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 4f1bb89de61..4f14eaa65b3 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -30,14 +30,22 @@ def fromHexsha(ordinal: int, hexsha: str, message: str): class CallCommitGraph: - def __init__(self, node_link_data=None, commit_id_generator=CommitIdGenerators.fromOrdinal): + def __init__(self, node_link_data=None, commit_id_generator=CommitIdGenerators.fromHexsha): if node_link_data: self._digraph = json_graph.node_link_graph(node_link_data) else: - self._digraph = nx.DiGraph(commitList=[]) + self._digraph = self._new_graph() self._commit_id_generator = commit_id_generator self._current_commit_id = None 
+ def reset(self): + self._digraph = self._new_graph() + + def _new_graph(self): + """Create a new nx.DiGraph for underlying storage + with appropriate arguments""" + return nx.DiGraph(commits={}) + # Read-only access def nodes(self, data=False): return self._digraph.nodes(data=data) @@ -49,15 +57,22 @@ def edges(self, data=False): # Read-only access def commits(self): # https://networkx.github.io/documentation/stable/tutorial.html#graph-attributes - return self._digraph.graph['commitList'] + return self._digraph.graph['commits'] + + # Read-only access + def __contains__(self, node): + return node in self._digraph - def add_commit(self, hexsha, author_name, author_email, commit_message): - self._current_commit_id = self._commit_id_generator(self._next_cindex(), hexsha, commit_message) - self._digraph.graph['commitList'].append({ + def add_commit(self, hexsha, author_name, author_email, message): + # TODO: remove `id` in a commit object + self._current_commit_id = self._commit_id_generator(self._next_cindex(), hexsha, message) + self._digraph.graph['commits'][hexsha] = { 'id': self._current_commit_id, - 'hexsha': hexsha, 'authorName': author_name, - 'authorEmail': author_email, 'message': commit_message - }) + 'hexsha': hexsha, + 'authorName': author_name, + 'authorEmail': author_email, + 'message': message + } # The index of the commit being analyzed def _cur_cindex(self): @@ -66,33 +81,38 @@ def _cur_cindex(self): def _next_cindex(self): return self._cur_cindex() + 1 - def reset(self): - self._digraph = nx.DiGraph(commitList=[]) - - def __contains__(self, node): - return node in self._digraph - - def add_node(self, node): - self._digraph.add_node(node, size=None, history={}) + # TODO: remove the default value of files + def add_node(self, node, files=[]): + self._digraph.add_node(node, size=None, history={}, files=files) # add_node must be called on source and target first def add_edge(self, source, target): + if source not in self._digraph: + raise 
ValueError("Error: caller %s does not exist in call-commit graph." % source) + if target not in self._digraph: + raise ValueError("Error: callee %s does not exist in call-commit graph." % target) self._digraph.add_edge(source, target, addedBy=self._current_commit_id, weight=None) - def update_node_history(self, node, size): + def update_node_history(self, node, num_adds, num_dels): node_history = self._get_node_history(node) - # A commit might update a node's history more than once + # A commit might update a node's history more than once when + # a single FunctionNode corresponds to more than one actual functions if self._current_commit_id in node_history: - node_history[self._current_commit_id] += size + node_history[self._current_commit_id]['adds'] += num_adds + node_history[self._current_commit_id]['dels'] += num_dels else: - node_history[self._current_commit_id] = size + node_history[self._current_commit_id] = {'adds': num_adds, 'dels': num_dels} # read/write access to node history are thourgh this function def _get_node_history(self, node): return self._digraph.nodes[node]['history'] + def update_node_files(self, node, new_files): + self._digraph.nodes[node]['files'] = new_files + + # TODO: provide other options for computing a node's size def _set_all_nodes_size(self, black_set=None): """ Compute node size after nodes have been added to the graph node size is currently defined as the total number lines of edits @@ -103,12 +123,12 @@ def _set_all_nodes_size(self, black_set=None): node_history = self._get_node_history(node) if black_set is not None: size = 0 - for cindex, csize in node_history.items(): - sha = self.commits()[cindex]['hexsha'] + for cid, chist in node_history.items(): + sha = self.commits()[cid]['hexsha'] if sha not in black_set: - size += csize + size += (chist['adds'] + chist['dels']) else: - size = sum(node_history.values()) + size = sum([chist['adds'] + chist['dels'] for chist in node_history.values()]) # set default size to 1 to avoid zero 
division error if size == 0: @@ -149,8 +169,9 @@ def commit_devranks(self, alpha, black_set=None): if len(history) == 0: continue - for cindex, csize in history.items(): - sha = self.commits()[cindex]['hexsha'] + for cid, chist in history.items(): + csize = chist['adds'] + chist['dels'] + sha = self.commits()[cid]['hexsha'] if black_set is None or sha not in black_set: dr = (csize / size) * func_devranks[func] if sha in commit_devranks: @@ -169,7 +190,7 @@ def developer_devranks(self, alpha, black_set=None): developer_devranks = {} commit_devranks = self.commit_devranks(alpha, black_set=black_set) - for commit in self.commits(): + for commit in self.commits().values(): sha = commit['hexsha'] email = commit['authorEmail'] diff --git a/persper/analytics/call_graph/c.py b/persper/analytics/call_graph/c.py index dca03678b02..40c1bc1b581 100644 --- a/persper/analytics/call_graph/c.py +++ b/persper/analytics/call_graph/c.py @@ -90,11 +90,11 @@ def update_graph(ccgraph, ast_list, change_stats): ccgraph.add_node(callee_name) ccgraph.add_edge(caller_name, callee_name) - for func_name, change_size in change_stats.items(): - if func_name not in ccgraph: + for func, fstat in change_stats.items(): + if func not in ccgraph: print("%s in change_stats but not in ccgraph" % func_name) continue - ccgraph.update_node_history(func_name, change_size) + ccgraph.update_node_history(func, fstat['adds'], fstat['dels']) def get_func_ranges_c(root): diff --git a/test/test_analytics/test_analyzer.py b/test/test_analytics/test_analyzer.py index 75c4c5b4a48..07063fa65cc 100644 --- a/test/test_analytics/test_analyzer.py +++ b/test/test_analytics/test_analyzer.py @@ -26,39 +26,78 @@ def az(): @pytest.mark.asyncio -async def test_az_basic(az): +async def test_analyzer_master_only(az): await az.analyze(from_beginning=True) ccgraph = az.get_graph() history_truth = { - 'K': {'display': 5}, - 'F': {'display': 14, 'count': 12}, - 'E': {'append': 29, 'add': 11}, - 'D': {'str_replace': 26}, - 'C': 
{'str_append_chr': 34, 'str_equals': 1}, - # Commit 'B' adds function "str_append_chr" for 7 lines - # Is it thought to be 5 lines because of inperfect diff - 'B': {'str_append': 9, 'str_append_chr': 5, 'str_equals': 11}, - 'A': {'str_append': 7, 'str_len': 6}, + 'K': { + 'display': {'adds': 0, 'dels': 5} + }, + 'F': { + 'display': {'adds': 14, 'dels': 0}, + 'count': {'adds': 12, 'dels': 0} + }, + 'E': { + 'append': {'adds': 29, 'dels': 0}, + 'add': {'adds': 11, 'dels': 0} + }, + 'D': { + 'str_replace': {'adds': 26, 'dels': 0} + }, + # TODO: fix \No newline at the end of file + 'C': { + 'str_append_chr': {'adds': 30, 'dels': 4}, + 'str_equals': {'adds': 0, 'dels': 1} + }, + # Commit `B` is an example of imperfect diff, + # it removes `str_append` and adds a new function `str_append_chr` + # but because they are too similar, + # diff doesn't separate these changes into two chunks + # please see here: https://github.com/UltimateBeaver/test_feature_branch/commit/caaac10f604ea7ac759c2147df8fb2b588ee2a27 + 'B': { + 'str_append': {'adds': 6, 'dels': 3}, + 'str_append_chr': {'adds': 3, 'dels': 2}, + 'str_equals': {'adds': 11, 'dels': 0} + }, + 'A': { + 'str_append': {'adds': 7, 'dels': 0}, + 'str_len': {'adds': 6, 'dels': 0} + }, - # branch J from commit A, merge back through F - 'J': {'count': 12, 'display': 14}, + # # branch J from commit A, merge back through F + # 'J': { + # 'count': {'adds': 12, 'dels': 0}, + # 'display': {'adds': 14, 'dels': 0} + # }, - # branch G from commit B, merge back through D - 'G': {'str_equals': 1, 'str_replace': 26}, + # # TODO: fix \No newline at the end of file + # # branch G from commit B, merge back through D + # 'G': { + # 'str_equals': {'adds': 0, 'dels': 1}, + # 'str_replace': {'adds': 26, 'dels': 0} + # }, - # branch H from commit D, merge back through E - 'I': {'add': 5, 'append': 35, 'insert': 25}, - 'H': {'add': 16, 'append': 12, 'insert': 25}, + # # branch H from commit D, merge back through E + # 'H': { + # 'add': {'adds': 16, 
'dels': 0}, + # 'append': {'adds': 12, 'dels': 0}, + # 'insert': {'adds': 25, 'dels': 0} + # }, + # 'I': { + # 'add': {'adds': 0, 'dels': 5}, + # 'append': {'adds': 26, 'dels': 9}, + # 'insert': {'adds': 0, 'dels': 25} + # }, } commits = ccgraph.commits() for func, data in ccgraph.nodes(data=True): history = data['history'] - for cindex, csize in history.items(): - commit_message = commits[cindex]['message'] - assert(csize == history_truth[commit_message.strip()][func]) + for cid, chist in history.items(): + message = commits[cid]['message'] + assert(chist == history_truth[message.strip()][func]) edges_truth = [ # Edges existing in final snapshot diff --git a/test/test_analytics/test_call_commit_graph.py b/test/test_analytics/test_call_commit_graph.py index 73cf4325b93..e9e6b758396 100644 --- a/test/test_analytics/test_call_commit_graph.py +++ b/test/test_analytics/test_call_commit_graph.py @@ -24,9 +24,9 @@ def test_call_commit_graph(): first_commit['authorEmail'], first_commit['message']) ccgraph.add_node('f1') - ccgraph.update_node_history('f1', 10) + ccgraph.update_node_history('f1', 10, 0) ccgraph.add_node('f2') - ccgraph.update_node_history('f2', 10) + ccgraph.update_node_history('f2', 10, 0) ccgraph.add_edge('f1', 'f2') func_drs = ccgraph.function_devranks(0.85) @@ -48,7 +48,7 @@ def test_call_commit_graph(): second_commit['authorEmail'], second_commit['message']) ccgraph.add_node('f3') - ccgraph.update_node_history('f3', 10) + ccgraph.update_node_history('f3', 10, 0) ccgraph.add_edge('f1', 'f3') func_drs2 = ccgraph.function_devranks(0.85) @@ -73,11 +73,11 @@ def test_call_commit_graph(): third_commit['authorEmail'], third_commit['message']) ccgraph.add_node('f4') - ccgraph.update_node_history('f4', 10) + ccgraph.update_node_history('f4', 10, 0) ccgraph.add_edge('f2', 'f4') ccgraph.add_node('f5') - ccgraph.update_node_history('f5', 10) + ccgraph.update_node_history('f5', 10, 0) ccgraph.add_edge('f2', 'f5') func_drs3 = ccgraph.function_devranks(0.85) From 
70fa86a010c722520a35d0220a9e9d330d5dce15 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Wed, 27 Feb 2019 17:53:25 -0800 Subject: [PATCH 48/84] Add files attribute to function node --- persper/analytics/call_graph/c.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/persper/analytics/call_graph/c.py b/persper/analytics/call_graph/c.py index 40c1bc1b581..ab4499ef13e 100644 --- a/persper/analytics/call_graph/c.py +++ b/persper/analytics/call_graph/c.py @@ -69,13 +69,18 @@ def handle_call(call_node): def update_graph(ccgraph, ast_list, change_stats): for ast in ast_list: + filename = ast.attrib['filename'] for function in ast.findall('./srcml:function', namespaces=ns): caller_name, _, _ = handle_function(function) if not caller_name: continue if caller_name not in ccgraph: - ccgraph.add_node(caller_name) + ccgraph.add_node(caller_name, [filename]) + else: + files = ccgraph.nodes()[caller_name]['files'] + if filename not in files: + ccgraph.update_node_files(caller_name, files + [filename]) for call in function.xpath('.//srcml:call', namespaces=ns): try: @@ -87,7 +92,9 @@ def update_graph(ccgraph, ast_list, change_stats): continue if callee_name not in ccgraph: - ccgraph.add_node(callee_name) + # Pass [] to files argument since we don't know + # which file this node belongs to + ccgraph.add_node(callee_name, []) ccgraph.add_edge(caller_name, callee_name) for func, fstat in change_stats.items(): From 237d96caf02a2ce1bdc505ff6b2ffa88f5c084b2 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Thu, 28 Feb 2019 14:33:54 -0800 Subject: [PATCH 49/84] Update README with test coverage info --- Pipfile | 1 + Pipfile.lock | 166 ++++++++++++++++++++++++++++++++------------------- README.md | 16 +++++ 3 files changed, 122 insertions(+), 61 deletions(-) diff --git a/Pipfile b/Pipfile index 3829aaf7910..f394eb45afc 100644 --- a/Pipfile +++ b/Pipfile @@ -21,6 +21,7 @@ antlr4-python3-runtime = "*" python-jsonrpc-server = "==0.0.2" pytest-asyncio = "*" aenum 
= "*" +pytest-cov = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index 0b4ea397982..ed93767c09c 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a347c42c8dbb2a243f5b89229aa7069d8421023259c6c764de0cfcd86ca8c512" + "sha256": "5c5de74c0a4afd69763b8b6896cd965013bbbda3f78de7544a2df4e2a9bcae74" }, "pipfile-spec": 6, "requires": { @@ -75,6 +75,42 @@ ], "version": "==3.0.4" }, + "coverage": { + "hashes": [ + "sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", + "sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", + "sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", + "sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", + "sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", + "sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", + "sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", + "sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", + "sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", + "sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", + "sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", + "sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", + "sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", + "sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", + "sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", + "sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", + "sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", + "sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", + "sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", + 
"sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", + "sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", + "sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", + "sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", + "sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", + "sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", + "sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", + "sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", + "sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", + "sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", + "sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", + "sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9" + ], + "version": "==4.5.2" + }, "cycler": { "hashes": [ "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", @@ -154,10 +190,10 @@ }, "jedi": { "hashes": [ - "sha256:571702b5bd167911fe9036e5039ba67f820d6502832285cde8c881ab2b2149fd", - "sha256:c8481b5e59d34a5c7c42e98f6625e633f6ef59353abea6437472c7ec2093f191" + "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b", + "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c" ], - "version": "==0.13.2" + "version": "==0.13.3" }, "jupyter-client": { "hashes": [ @@ -208,35 +244,35 @@ }, "lxml": { "hashes": [ - "sha256:0537eee4902e8bf4f41bfee8133f7edf96533dd175930a12086d6a40d62376b2", - "sha256:0562ec748abd230ab87d73384e08fa784f9b9cee89e28696087d2d22c052cc27", - "sha256:09e91831e749fbf0f24608694e4573be0ef51430229450c39c83176cc2e2d353", - "sha256:1ae4c0722fc70c0d4fba43ae33c2885f705e96dce1db41f75ae14a2d2749b428", - "sha256:1c630c083d782cbaf1f7f37f6cac87bda9cff643cf2803a5f180f30d97955cef", - 
"sha256:2fe74e3836bd8c0fa7467ffae05545233c7f37de1eb765cacfda15ad20c6574a", - "sha256:37af783c2667ead34a811037bda56a0b142ac8438f7ed29ae93f82ddb812fbd6", - "sha256:3f2d9eafbb0b24a33f56acd16f39fc935756524dcb3172892721c54713964c70", - "sha256:47d8365a8ef14097aa4c65730689be51851b4ade677285a3b2daa03b37893e26", - "sha256:510e904079bc56ea784677348e151e1156040dbfb736f1d8ea4b9e6d0ab2d9f4", - "sha256:58d0851da422bba31c7f652a7e9335313cf94a641aa6d73b8f3c67602f75b593", - "sha256:7940d5c2185ffb989203dacbb28e6ae88b4f1bb25d04e17f94b0edd82232bcbd", - "sha256:7cf39bb3a905579836f7a8f3a45320d9eb22f16ab0c1e112efb940ced4d057a5", - "sha256:9563a23c1456c0ab550c087833bc13fcc61013a66c6420921d5b70550ea312bf", - "sha256:95b392952935947e0786a90b75cc33388549dcb19af716b525dae65b186138fc", - "sha256:983129f3fd3cef5c3cf067adcca56e30a169656c00fcc6c648629dbb850b27fa", - "sha256:a0b75b1f1854771844c647c464533def3e0a899dd094a85d1d4ed72ecaaee93d", - "sha256:b5db89cc0ef624f3a81214b7961a99f443b8c91e88188376b6b322fd10d5b118", - "sha256:c0a7751ba1a4bfbe7831920d98cee3ce748007eab8dfda74593d44079568219a", - "sha256:c0c5a7d4aafcc30c9b6d8613a362567e32e5f5b708dc41bc3a81dac56f8af8bb", - "sha256:d4d63d85eacc6cb37b459b16061e1f100d154bee89dc8d8f9a6128a5a538e92e", - "sha256:da5e7e941d6e71c9c9a717c93725cda0708c2474f532e3680ac5e39ec57d224d", - "sha256:dccad2b3c583f036f43f80ac99ee212c2fa9a45151358d55f13004d095e683b2", - "sha256:df46307d39f2aeaafa1d25309b8a8d11738b73e9861f72d4d0a092528f498baa", - "sha256:e70b5e1cb48828ddd2818f99b1662cb9226dc6f57d07fc75485405c77da17436", - "sha256:ea825562b8cd057cbc9810d496b8b5dec37a1e2fc7b27bc7c1e72ce94462a09a" + "sha256:0358b9e9642bc7d39aac5cffe9884a99a5ca68e5e2c1b89e570ed60da9139908", + "sha256:091a359c4dafebbecd3959d9013f1b896b5371859165e4e50b01607a98d9e3e2", + "sha256:1998e4e60603c64bcc35af61b4331ab3af087457900d3980e18d190e17c3a697", + "sha256:2000b4088dee9a41f459fddaf6609bba48a435ce6374bb254c5ccdaa8928c5ba", + "sha256:2afb0064780d8aaf165875be5898c1866766e56175714fa5f9d055433e92d41d", 
+ "sha256:2d8f1d9334a4e3ff176d096c14ded3100547d73440683567d85b8842a53180bb", + "sha256:2e38db22f6a3199fd63675e1b4bd795d676d906869047398f29f38ca55cb453a", + "sha256:3181f84649c1a1ca62b19ddf28436b1b2cb05ae6c7d2628f33872e713994c364", + "sha256:37462170dfd88af8431d04de6b236e6e9c06cda71e2ca26d88ef2332fd2a5237", + "sha256:3a9d8521c89bf6f2a929c3d12ad3ad7392c774c327ea809fd08a13be6b3bc05f", + "sha256:3d0bbd2e1a28b4429f24fd63a122a450ce9edb7a8063d070790092d7343a1aa4", + "sha256:483d60585ce3ee71929cea70949059f83850fa5e12deb9c094ed1c8c2ec73cbd", + "sha256:4888be27d5cba55ce94209baef5bcd7bbd7314a3d17021a5fc10000b3a5f737d", + "sha256:64b0d62e4209170a2a0c404c446ab83b941a0003e96604d2e4f4cb735f8a2254", + "sha256:68010900898fdf139ac08549c4dba8206c584070a960ffc530aebf0c6f2794ef", + "sha256:872ecb066de602a0099db98bd9e57f4cfc1d62f6093d94460c787737aa08f39e", + "sha256:88a32b03f2e4cd0e63f154cac76724709f40b3fc2f30139eb5d6f900521b44ed", + "sha256:b1dc7683da4e67ab2bebf266afa68098d681ae02ce570f0d1117312273d2b2ac", + "sha256:b29e27ce9371810250cb1528a771d047a9c7b0f79630dc7dc5815ff828f4273b", + "sha256:ce197559596370d985f1ce6b7051b52126849d8159040293bf8b98cb2b3e1f78", + "sha256:d45cf6daaf22584eff2175f48f82c4aa24d8e72a44913c5aff801819bb73d11f", + "sha256:e2ff9496322b2ce947ba4a7a5eb048158de9d6f3fe9efce29f1e8dd6878561e6", + "sha256:f7b979518ec1f294a41a707c007d54d0f3b3e1fd15d5b26b7e99b62b10d9a72e", + "sha256:f9c7268e9d16e34e50f8246c4f24cf7353764affd2bc971f0379514c246e3f6b", + "sha256:f9c839806089d79de588ee1dde2dae05dc1156d3355dfeb2b51fde84d9c960ad", + "sha256:ff962953e2389226adc4d355e34a98b0b800984399153c6678f2367b11b4d4b8" ], "index": "pypi", - "version": "==4.3.1" + "version": "==4.3.2" }, "matplotlib": { "hashes": [ @@ -281,32 +317,32 @@ }, "numpy": { "hashes": [ - "sha256:0cdbbaa30ae69281b18dd995d3079c4e552ad6d5426977f66b9a2a95f11f552a", - "sha256:2b0cca1049bd39d1879fa4d598624cafe82d35529c72de1b3d528d68031cdd95", - "sha256:31d3fe5b673e99d33d70cfee2ea8fe8dccd60f265c3ed990873a88647e3dd288", - 
"sha256:34dd4922aab246c39bf5df03ca653d6265e65971deca6784c956bf356bca6197", - "sha256:384e2dfa03da7c8d54f8f934f61b6a5e4e1ebb56a65b287567629d6c14578003", - "sha256:392e2ea22b41a22c0289a88053204b616181288162ba78e6823e1760309d5277", - "sha256:4341a39fc085f31a583be505eabf00e17c619b469fef78dc7e8241385bfddaa4", - "sha256:45080f065dcaa573ebecbfe13cdd86e8c0a68c4e999aa06bd365374ea7137706", - "sha256:485cb1eb4c9962f4cd042fed9424482ec1d83fee5dc2ef3f2552ac47852cb259", - "sha256:575cefd28d3e0da85b0864506ae26b06483ee4a906e308be5a7ad11083f9d757", - "sha256:62784b35df7de7ca4d0d81c5b6af5983f48c5cdef32fc3635b445674e56e3266", - "sha256:69c152f7c11bf3b4fc11bc4cc62eb0334371c0db6844ebace43b7c815b602805", - "sha256:6ccfdcefd287f252cf1ea7a3f1656070da330c4a5658e43ad223269165cdf977", - "sha256:7298fbd73c0b3eff1d53dc9b9bdb7add8797bb55eeee38c8ccd7906755ba28af", - "sha256:79463d918d1bf3aeb9186e3df17ddb0baca443f41371df422f99ee94f4f2bbfe", - "sha256:8bbee788d82c0ac656536de70e817af09b7694f5326b0ef08e5c1014fcb96bb3", - "sha256:a863957192855c4c57f60a75a1ac06ce5362ad18506d362dd807e194b4baf3ce", - "sha256:ae602ba425fb2b074e16d125cdce4f0194903da935b2e7fe284ebecca6d92e76", - "sha256:b13faa258b20fa66d29011f99fdf498641ca74a0a6d9266bc27d83c70fea4a6a", - "sha256:c2c39d69266621dd7464e2bb740d6eb5abc64ddc339cc97aa669f3bb4d75c103", - "sha256:e9c88f173d31909d881a60f08a8494e63f1aff2a4052476b24d4f50e82c47e24", - "sha256:f1a29267ac29fff0913de0f11f3a9edfcd3f39595f467026c29376fad243ebe3", - "sha256:f69dde0c5a137d887676a8129373e44366055cf19d1b434e853310c7a1e68f93" + "sha256:1980f8d84548d74921685f68096911585fee393975f53797614b34d4f409b6da", + "sha256:22752cd809272671b273bb86df0f505f505a12368a3a5fc0aa811c7ece4dfd5c", + "sha256:23cc40313036cffd5d1873ef3ce2e949bdee0646c5d6f375bf7ee4f368db2511", + "sha256:2b0b118ff547fecabc247a2668f48f48b3b1f7d63676ebc5be7352a5fd9e85a5", + "sha256:3a0bd1edf64f6a911427b608a894111f9fcdb25284f724016f34a84c9a3a6ea9", + "sha256:3f25f6c7b0d000017e5ac55977a3999b0b1a74491eacb3c1aa716f0e01f6dcd1", 
+ "sha256:4061c79ac2230594a7419151028e808239450e676c39e58302ad296232e3c2e8", + "sha256:560ceaa24f971ab37dede7ba030fc5d8fa173305d94365f814d9523ffd5d5916", + "sha256:62be044cd58da2a947b7e7b2252a10b42920df9520fc3d39f5c4c70d5460b8ba", + "sha256:6c692e3879dde0b67a9dc78f9bfb6f61c666b4562fd8619632d7043fb5b691b0", + "sha256:6f65e37b5a331df950ef6ff03bd4136b3c0bbcf44d4b8e99135d68a537711b5a", + "sha256:7a78cc4ddb253a55971115f8320a7ce28fd23a065fc33166d601f51760eecfa9", + "sha256:80a41edf64a3626e729a62df7dd278474fc1726836552b67a8c6396fd7e86760", + "sha256:893f4d75255f25a7b8516feb5766c6b63c54780323b9bd4bc51cdd7efc943c73", + "sha256:972ea92f9c1b54cc1c1a3d8508e326c0114aaf0f34996772a30f3f52b73b942f", + "sha256:9f1d4865436f794accdabadc57a8395bd3faa755449b4f65b88b7df65ae05f89", + "sha256:9f4cd7832b35e736b739be03b55875706c8c3e5fe334a06210f1a61e5c2c8ca5", + "sha256:adab43bf657488300d3aeeb8030d7f024fcc86e3a9b8848741ea2ea903e56610", + "sha256:bd2834d496ba9b1bdda3a6cf3de4dc0d4a0e7be306335940402ec95132ad063d", + "sha256:d20c0360940f30003a23c0adae2fe50a0a04f3e48dc05c298493b51fd6280197", + "sha256:d3b3ed87061d2314ff3659bb73896e622252da52558f2380f12c421fbdee3d89", + "sha256:dc235bf29a406dfda5790d01b998a1c01d7d37f449128c0b1b7d1c89a84fae8b", + "sha256:fb3c83554f39f48f3fa3123b9c24aecf681b1c289f9334f8215c1d3c8e2f6e5b" ], "index": "pypi", - "version": "==1.16.1" + "version": "==1.16.2" }, "openpyxl": { "hashes": [ @@ -339,10 +375,10 @@ }, "pluggy": { "hashes": [ - "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", - "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" + "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", + "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" ], - "version": "==0.8.1" + "version": "==0.9.0" }, "prompt-toolkit": { "hashes": [ @@ -361,10 +397,10 @@ }, "py": { "hashes": [ - "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", - 
"sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" + "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", + "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" ], - "version": "==1.7.0" + "version": "==1.8.0" }, "pydot": { "hashes": [ @@ -404,6 +440,14 @@ "index": "pypi", "version": "==0.10.0" }, + "pytest-cov": { + "hashes": [ + "sha256:0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", + "sha256:230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f" + ], + "index": "pypi", + "version": "==2.6.1" + }, "python-dateutil": { "hashes": [ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", diff --git a/README.md b/README.md index 356a07f37d7..b775f43351d 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,22 @@ pipenv run pytest test/test_analytics You should see all tests passed. +## Report Test Coverage + +We use [coverage.py](https://coverage.readthedocs.io/) and [pytest-cov](https://pytest-cov.readthedocs.io/en/latest/) to compute test coverage: + +``` +# Execution +pytest --cov=persper/ test/test_analytics + +# Reporting +coverage html + +# then visit htmlcov/index.html in your browser +``` + + + ## Interactive mode with jupyter notebook 1. 
Install Jupyter From fe11b22ca9def106c87feb8f39ed729702d0e90d Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Thu, 28 Feb 2019 15:25:02 -0800 Subject: [PATCH 50/84] Remove deprecated code --- persper/analytics/processor.py | 375 --------------------------------- 1 file changed, 375 deletions(-) delete mode 100644 persper/analytics/processor.py diff --git a/persper/analytics/processor.py b/persper/analytics/processor.py deleted file mode 100644 index df8e2842c70..00000000000 --- a/persper/analytics/processor.py +++ /dev/null @@ -1,375 +0,0 @@ -import os -import time -import pickle -from persper.analytics.git_tools import initialize_repo -from collections import deque -import functools -print = functools.partial(print, flush=True) - -EMPTY_TREE_SHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904' - - -def _diff_with_first_parent(commit): - if len(commit.parents) == 0: - prev_commit = EMPTY_TREE_SHA - else: - prev_commit = commit.parents[0] - # commit.diff automatically detect renames - return commit.diff(prev_commit, - create_patch=True, R=True, indent_heuristic=True) - - -def _fill_change_type(diff_index): - for diff in diff_index: - if diff.new_file: - diff.change_type = 'A' - elif diff.deleted_file: - diff.change_type = 'D' - elif diff.renamed: - diff.change_type = 'R' - elif (diff.a_blob and diff.b_blob and - (diff.a_blob != diff.b_blob)): - diff.change_type = 'M' - else: - diff.change_type = 'U' - - -def _print_diff_index(diff_index): - print(" ".join([diff.change_type for diff in diff_index])) - - -def _subject(msg): - return msg.split('\n', 1)[0].lstrip().rstrip() - - -class Processor(): - - def __init__(self, repo_path): - self.repo_path = repo_path - self.repo = initialize_repo(repo_path) - self.visited = set() - self.last_processed_commit = None - - def process(self, rev=None, - from_beginning=False, num_commits=None, - from_last_processed=False, end_commit_sha=None, - into_branches=False, - max_branch_length=100, - min_branch_date=None, - 
checkpoint_interval=100, - skip_work=False, - verbose=True): - """ - This function supports four ways of specifying the - range of commits to process: - - Method 1: rev - Pass `rev` parameter and set both - `from_beginning` and `from_last_processed` to False. - `rev` is the revision specifier which follows - an extended SHA-1 syntax. Please refer to git-rev-parse - for viable options. `rev' should only include commits - on the master branch. - - Method 2: from_beginning & num_commits (optional) - Set `from_beginning` to True and - pass `num_commits` parameter. Using this - method, the function will start from the - very first commit on the master branch and - process the following `num_commits` commits - (also on the master branch). - - Method 3: from_last_processed & num_commits - Set `from_last_processed` to True and pass - `num_commits` parameter. Using this method, the - function will resume processing from succeeding commit of - `self.last_processed_commit` for `num_commits` commits. - - Method 4: from_last_processed & end_commit_sha - Set `from_last_processed` to True and pass - `end_commit_sha` parameter. The range of continued processing - will be `self.last_processed_commit.hexsha..end_commit_sha`. - - Args: - rev: A string, see above. - num_commits: An int, see above. - from_beginning: A boolean flag, see above. - from_last_processed: A boolean flag, see above. - end_commit_sha: A string, see above. - into_branches: A boolean flag, if True, the process function - will operate in two phases. - - In the first phase, a call commit graph is contructed - by traversing the specified range of commits on the master - branch. Merge commits are detected and recorded if the - start commit (on master) and end/merge commit of the - corresponding branch are both within the range of - traversal. Those recorded merge commits do not - get any credits (thus they are not present in - self.history data structure). 
- - In the second phase, it traverses all the branches detected - in the first phase and assign them due credits. - - max_branch_length: An int, the maximum number of commits - to trace back before abortion. - min_branch_date: A python time object, stop backtracing if - a commit is authored before this time. - checkpoint_interval: An int. - """ - if not from_last_processed: - self._reset_state() - self.merge_commits = deque() - - # Method 2 - if from_beginning: - if num_commits is None: - num_commits = 0 - self.commits = list( - self.repo.iter_commits(first_parent=True))[-num_commits:] - - elif from_last_processed: - if not self.last_processed_commit: - print("No history exists yet, terminated.") - return - - # Method 4 - if end_commit_sha: - rev = self.last_processed_commit.hexsha + '..' + end_commit_sha - self.commits = list(self.repo.iter_commits( - rev, first_parent=True)) - # Method 3 - elif num_commits: - rev = self.last_processed_commit.hexsha + '..master' - self.commits = list(self.repo.iter_commits( - rev, first_parent=True))[-num_commits:] - else: - print("Both end_commit_sha and num_commits are None.") - return - - else: - # Method 1 - self.commits = list(self.repo.iter_commits(rev, first_parent=True)) - - if len(self.commits) > 0: - self.last_processed_commit = self.commits[0] - else: - print("The range specified is empty, terminated.") - return - - counter = 1 - start = time.time() - - # 1st phase - for commit in reversed(self.commits): - sha = commit.hexsha - self.visited.add(sha) - self._start_process_commit(commit) - - if verbose: - print('------ No.{} {} {} {} ------'.format( - counter, sha, _subject(commit.message), - time.strftime( - "%b %d %Y", time.gmtime(commit.authored_date) - )) - ) - else: - print('------ No.{} {} ------'.format(counter, sha)) - if counter % 100 == 0: - print('------ Used time: {} ------'.format( - time.time() - start)) - - if counter % checkpoint_interval == 0: - repo_name = os.path.basename(self.repo_path.rstrip('/')) - 
self.save(repo_name + '-1st-' + str(counter) + '.pickle') - - if into_branches: - is_merge_commit = len(commit.parents) > 1 - if is_merge_commit: - self.merge_commits.append(commit) - """ - is_merge_commit = self._detect_branch( - commit, max_branch_length, min_branch_date) - """ - else: - is_merge_commit = False - - if not skip_work: - # generate diff_index by diff commit with its first parent - diff_index = _diff_with_first_parent(commit) - - # figure out the change type of each entry in diff_index - _fill_change_type(diff_index) - - if verbose: - _print_diff_index(diff_index) - - for diff in diff_index: - if diff.change_type == 'U': - print('Unknown change type encountered.') - continue - - if diff.change_type == 'A': - self.on_add(diff, commit, is_merge_commit) - - elif diff.change_type == 'D': - self.on_delete(diff, commit, is_merge_commit) - - elif diff.change_type == 'R': - self.on_rename(diff, commit, is_merge_commit) - - else: - self.on_modify(diff, commit, is_merge_commit) - - counter += 1 - - # 2nd phase - if into_branches: - - commit_cnt = 1 - branch_cnt = 1 - start = time.time() - - print('\n------- 2nd phase -------\n') - - while len(self.merge_commits) > 0: - mc = self.merge_commits.popleft() - cur_commit = mc.parents[1] - branch_length = 0 - valid_branch = False - - while True: - - # stop tracing back along this branch - # if cur_commit has been visited - if cur_commit.hexsha in self.visited: - break - - # stop if we have reached time boundary - authored_date = time.gmtime(cur_commit.authored_date) - if min_branch_date and min_branch_date > authored_date: - break - - # stop if we have reached max_branch_length - if branch_length >= max_branch_length: - break - - # stop if we have reached the very first commit - if len(cur_commit.parents) == 0: - break - - # will process at least one commit for this branch - valid_branch = True - - # process this commit - if verbose: - print('------ Commit No.{} '.format(commit_cnt), - 'Branch No.{} {} {} {} 
------'.format( - branch_cnt, - cur_commit.hexsha, - _subject(cur_commit.message), - time.strftime( - "%b %d %Y", - time.gmtime(cur_commit.authored_date) - ) - ) - ) - else: - print('------ Commit No.{} '.format(commit_cnt), - 'Branch No.{} {}------'.format( - branch_cnt, cur_commit.hexsha)) - - if commit_cnt % 100 == 0: - print('------ Used time: {} ------'.format( - time.time() - start)) - - if commit_cnt % checkpoint_interval == 0: - repo_name = os.path.basename( - self.repo_path.rstrip('/')) - self.save( - repo_name + '-2nd-' + str(counter) + '.pickle') - - self.visited.add(cur_commit.hexsha) - # add to queue if prev_commit is a merge commit - if len(cur_commit.parents) == 2: - self.merge_commits.append(cur_commit) - - if not skip_work: - self._start_process_commit(cur_commit) - diff_index = _diff_with_first_parent(cur_commit) - _fill_change_type(diff_index) - for diff in diff_index: - if diff.change_type == 'U': - print('Unknown change type encountered.') - continue - if diff.change_type == 'A': - self.on_add2(diff, cur_commit) - elif diff.change_type == 'D': - self.on_delete2(diff, cur_commit) - elif diff.change_type == 'R': - self.on_rename2(diff, cur_commit) - else: - self.on_modify2(diff, cur_commit) - - # get next commit - prev_commit = cur_commit.parents[0] - - cur_commit = prev_commit - branch_length += 1 - commit_cnt += 1 - - if valid_branch: - branch_cnt += 1 - - repo_name = os.path.basename(self.repo_path.rstrip('/')) - self.save(repo_name + '-finished.pickle') - - def _reset_state(self): - self.visited = set() - self.last_processed_commit = None - - def _start_process_commit(self, commit): - pass - - def set_repo_path(self, repo_path): - self.repo_path = repo_path - self.repo = initialize_repo(repo_path) - self.last_processed_commit = self.repo.commit(self.last_sha) - - def on_add(self, diff, commit, is_merge_commit): - return 0 - - def on_delete(self, diff, commit, is_merge_commit): - return 0 - - def on_rename(self, diff, commit, is_merge_commit): 
- return 0 - - def on_modify(self, diff, commit, is_merge_commit): - return 0 - - def on_add2(self, diff, commit): - return 0 - - def on_delete2(self, diff, commit): - return 0 - - def on_rename2(self, diff, commit): - return 0 - - def on_modify2(self, diff, commit): - return 0 - - def __getstate__(self): - state = { - 'visited': self.visited, - 'last_sha': self.last_processed_commit.hexsha - } - return state - - def __setstate__(self, state): - self.__dict__.update(state) - - def save(self, fname): - with open(fname, 'wb+') as f: - pickle.dump(self, f) From 410f2a90ab81613856d3913b048ca723305a8d46 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Thu, 28 Feb 2019 15:55:42 -0800 Subject: [PATCH 51/84] Workaround gitpython's limitation for reverse git diff --- README.md | 17 +++-------------- persper/analytics/analyzer.py | 16 ++++++++++------ persper/analytics/analyzer2.py | 2 +- persper/analytics/git_tools.py | 25 +++++++++++++------------ 4 files changed, 27 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index b775f43351d..9e9c9b6caad 100644 --- a/README.md +++ b/README.md @@ -36,18 +36,7 @@ sudo apt-get install git -y git --version ``` -4. Apply a patch to gitpython - -(Try to) apply a patch to gitpython 2.1.x: - -```bash -pipenv shell -cd misc/ -./apply_patch.py -exit -``` - -5. Add project directory to path +4. Add project directory to path Add the following line to your `~/.bashrc` file. @@ -60,7 +49,7 @@ To update your path for the remainder of the session. source ~/.bashrc ``` -6. Install srcML for parsing C/C++ and Java +5. Install srcML for parsing C/C++ and Java Please download from [here](https://www.srcml.org/#download) and follow the [instructions](http://131.123.42.38/lmcrs/beta/README). @@ -71,7 +60,7 @@ sudo apt install libarchive-dev sudo apt install libcurl4-openssl-dev ``` -7. Check setup correctness +6. 
Check setup correctness ```bash pipenv run pytest test/test_analytics diff --git a/persper/analytics/analyzer.py b/persper/analytics/analyzer.py index d39e2b543aa..93e28004319 100644 --- a/persper/analytics/analyzer.py +++ b/persper/analytics/analyzer.py @@ -2,11 +2,12 @@ import time import pickle import asyncio -from persper.analytics.git_tools import get_contents, _diff_with_first_parent +from persper.analytics.git_tools import get_contents, diff_with_first_parent, initialize_repo from persper.analytics.iterator import RepoIterator from abc import ABC from git import Commit + def print_overview(commits, branch_commits): print('----- Overview ------') print('# of commits on master: %d' % len(commits)) @@ -61,9 +62,11 @@ class Analyzer: def __init__(self, repo_path, graph_server): self._graph_server = graph_server + self._repo_path = repo_path self._ri = RepoIterator(repo_path) + self._repo = initialize_repo(repo_path) self._ccgraph = None - self._observer:AnalyzerObserver = emptyAnalyzerObserver + self._observer: AnalyzerObserver = emptyAnalyzerObserver @property def observer(self): @@ -127,7 +130,7 @@ async def _analyze_commit(self, commit, server_func): commit.author.name, commit.author.email, commit.message) - diff_index = _diff_with_first_parent(commit) + diff_index = diff_with_first_parent(self._repo, commit) for diff in diff_index: old_fname, new_fname = _get_fnames(diff) @@ -146,10 +149,10 @@ async def _analyze_commit(self, commit, server_func): if old_fname: old_src = get_contents( - self._ri.repo, commit.parents[0], old_fname) + self._repo, commit.parents[0], old_fname) if new_fname: - new_src = get_contents(self._ri.repo, commit, new_fname) + new_src = get_contents(self._repo, commit, new_fname) if old_src or new_src: # todo (hezheng) store the status somewhere for reporting later @@ -182,7 +185,7 @@ def save(self, fname): def autosave(self, phase, idx, checkpoint_interval): if idx % checkpoint_interval == 0: - repo_name = 
os.path.basename(self._ri.repo_path.rstrip('/')) + repo_name = os.path.basename(self._repo_path.rstrip('/')) fname = repo_name + '-' + phase + '-' + str(idx) + '.pickle' self.save(fname) @@ -194,6 +197,7 @@ def __getstate__(self): def __setstate__(self, state): self.__dict__.update(state) + class AnalyzerObserver(ABC): """ Used to observe the progress of `Analyzer` during its analysis of the target repository. diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index f8e3a5626d7..13ab72f5f07 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -154,7 +154,7 @@ async def _analyzeCommit(self, commit: Union[Commit, str], parentCommit: Union[C commit.author.name, commit.author.email, commit.message) if asyncio.iscoroutine(result): await result - diff_index = diff_with_commit(commit, parentCommit) + diff_index = diff_with_commit(self._repo, commit, parentCommit) for diff in diff_index: old_fname, new_fname = _get_fnames(diff) diff --git a/persper/analytics/git_tools.py b/persper/analytics/git_tools.py index 42401caa159..192c4345c56 100644 --- a/persper/analytics/git_tools.py +++ b/persper/analytics/git_tools.py @@ -2,24 +2,25 @@ from git import Repo, Commit from typing import Union import sys +import git EMPTY_TREE_SHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904' -def _diff_with_first_parent(commit): +def diff_with_first_parent(repo: Repo, commit: Commit): if len(commit.parents) == 0: - prev_commit = EMPTY_TREE_SHA + return diff_with_commit(repo, commit, None) else: - prev_commit = commit.parents[0] - # commit.diff automatically detect renames - return commit.diff(prev_commit, - create_patch=True, R=True, indent_heuristic=True) - -def diff_with_commit(current_commit:Commit, base_commit:Union[Commit, str]=None): - localBaseCommit = base_commit - if not localBaseCommit: - localBaseCommit = EMPTY_TREE_SHA - return current_commit.diff(localBaseCommit, create_patch=True, R=True, indent_heuristic=True) + return 
diff_with_commit(repo, commit, commit.parents[0]) + + +def diff_with_commit(repo: Repo, current_commit: Commit, base_commit_sha: str): + if not base_commit_sha: + base_commit = repo.tree(EMPTY_TREE_SHA) + else: + base_commit = repo.commit(base_commit_sha) + return base_commit.diff(current_commit, create_patch=True, indent_heuristic=True) + def initialize_repo(repo_path): try: From a3f35a8fc0d8fbcc8734a41149e52a52b366a95a Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Thu, 28 Feb 2019 15:57:35 -0800 Subject: [PATCH 52/84] Remove the entire deprecated persper/graphs --- persper/graphs/analyzer.py | 268 --------------- persper/graphs/c.py | 108 ------ persper/graphs/call_commit_graph.py | 368 -------------------- persper/graphs/call_graph/c.py | 150 --------- persper/graphs/call_graph/cpp.py | 51 --- persper/graphs/call_graph/java.py | 411 ----------------------- persper/graphs/call_graph/utils.py | 45 --- persper/graphs/commit_graph.py | 147 -------- persper/graphs/cpp.py | 3 - persper/graphs/cpp_tools.py | 38 --- persper/graphs/detect_change.py | 70 ---- persper/graphs/devrank.py | 58 ---- persper/graphs/git_tools.py | 32 -- persper/graphs/graph_server.py | 81 ----- persper/graphs/graph_server_http.py | 54 --- persper/graphs/inverse_diff.py | 41 --- persper/graphs/iterator.py | 186 ---------- persper/graphs/pagerank.py | 43 --- persper/graphs/parse_patch.py | 45 --- persper/graphs/patch_parser.py | 87 ----- persper/graphs/processor.py | 375 --------------------- persper/graphs/ruby_tools.py | 53 --- persper/graphs/srcml.py | 94 ------ persper/graphs/write_graph_to_dot.py | 31 -- test/test_graphs/example.cc | 100 ------ test/test_graphs/example.patch | 111 ------ test/test_graphs/example2.patch | 8 - test/test_graphs/example3.patch | 32 -- test/test_graphs/example4.patch | 28 -- test/test_graphs/test_analyzer.py | 139 -------- test/test_graphs/test_analyzer_js.py | 87 ----- test/test_graphs/test_callcommitgraph.py | 125 ------- 
test/test_graphs/test_detect_change.py | 87 ----- test/test_graphs/test_inverse_diff.py | 18 - test/test_graphs/test_iterator.py | 65 ---- test/test_graphs/test_map_id.py | 46 --- test/test_graphs/test_processor.py | 26 -- 37 files changed, 3711 deletions(-) delete mode 100644 persper/graphs/analyzer.py delete mode 100644 persper/graphs/c.py delete mode 100644 persper/graphs/call_commit_graph.py delete mode 100644 persper/graphs/call_graph/c.py delete mode 100644 persper/graphs/call_graph/cpp.py delete mode 100644 persper/graphs/call_graph/java.py delete mode 100644 persper/graphs/call_graph/utils.py delete mode 100644 persper/graphs/commit_graph.py delete mode 100644 persper/graphs/cpp.py delete mode 100644 persper/graphs/cpp_tools.py delete mode 100644 persper/graphs/detect_change.py delete mode 100644 persper/graphs/devrank.py delete mode 100644 persper/graphs/git_tools.py delete mode 100644 persper/graphs/graph_server.py delete mode 100644 persper/graphs/graph_server_http.py delete mode 100644 persper/graphs/inverse_diff.py delete mode 100644 persper/graphs/iterator.py delete mode 100644 persper/graphs/pagerank.py delete mode 100644 persper/graphs/parse_patch.py delete mode 100644 persper/graphs/patch_parser.py delete mode 100644 persper/graphs/processor.py delete mode 100644 persper/graphs/ruby_tools.py delete mode 100755 persper/graphs/srcml.py delete mode 100644 persper/graphs/write_graph_to_dot.py delete mode 100644 test/test_graphs/example.cc delete mode 100644 test/test_graphs/example.patch delete mode 100644 test/test_graphs/example2.patch delete mode 100644 test/test_graphs/example3.patch delete mode 100644 test/test_graphs/example4.patch delete mode 100644 test/test_graphs/test_analyzer.py delete mode 100644 test/test_graphs/test_analyzer_js.py delete mode 100644 test/test_graphs/test_callcommitgraph.py delete mode 100644 test/test_graphs/test_detect_change.py delete mode 100644 test/test_graphs/test_inverse_diff.py delete mode 100644 
test/test_graphs/test_iterator.py delete mode 100644 test/test_graphs/test_map_id.py delete mode 100644 test/test_graphs/test_processor.py diff --git a/persper/graphs/analyzer.py b/persper/graphs/analyzer.py deleted file mode 100644 index 66b0ed988d5..00000000000 --- a/persper/graphs/analyzer.py +++ /dev/null @@ -1,268 +0,0 @@ -import os -import time -import pickle -from persper.graphs.devrank import devrank -from persper.graphs.git_tools import get_contents, _diff_with_first_parent -from persper.graphs.iterator import RepoIterator -from persper.util.bidict import bidict - - -def print_overview(commits, branch_commits): - print('----- Overview ------') - print('# of commits on master: %d' % len(commits)) - print('# of commits on branch: %d' % len(branch_commits)) - - -def print_commit_info(phase, idx, commit, start_time, verbose): - if verbose: - print('----- No.%d %s %s %s -----' % - (idx, commit.hexsha, subject_of(commit.message), - time.strftime("%b %d %Y", time.gmtime(commit.authored_date)))) - else: - print('----- No.%d %s on %s -----' % (idx, commit.hexsha, phase)) - - if idx % 100 == 0: - print('------ Used time: %.3f -----' % (time.time() - start_time)) - - -def subject_of(msg): - return msg.split('\n', 1)[0].lstrip().rstrip() - - -def _get_fnames(diff): - if diff.new_file: - # change type 'A' - old_fname = None - new_fname = diff.b_blob.path - elif diff.deleted_file: - # change type 'D' - old_fname = diff.a_blob.path - new_fname = None - elif diff.renamed: - # change type 'R' - old_fname = diff.rename_from - new_fname = diff.rename_to - elif (diff.a_blob and diff.b_blob and - (diff.a_blob != diff.b_blob)): - # change type 'M' - old_fname = new_fname = diff.b_blob.path - else: - # change type 'U' - return None, None - - return old_fname, new_fname - - -def is_merge_commit(commit): - return len(commit.parents) > 1 - - -def _normalize_shares(email_to_share): - share_sum = 0 - for email, share in email_to_share.items(): - share_sum += share - - for email in 
email_to_share: - email_to_share[email] /= share_sum - - -class Analyzer: - - def __init__(self, repo_path, graph_server): - self.graph_server = graph_server - self.ri = RepoIterator(repo_path) - self.history = {} - self.id_map = {} - self.ordered_shas = [] - self.graph = None - - def analyze(self, rev=None, - from_beginning=False, - num_commits=None, - continue_iter=False, - end_commit_sha=None, - into_branches=False, - max_branch_length=100, - min_branch_date=None, - checkpoint_interval=1000, - verbose=False): - - if not continue_iter: - self.reset_state() - self.graph_server.reset_graph() - - commits, branch_commits = \ - self.ri.iter(rev=rev, - from_beginning=from_beginning, - num_commits=num_commits, - continue_iter=continue_iter, - end_commit_sha=end_commit_sha, - into_branches=into_branches, - max_branch_length=max_branch_length, - min_branch_date=min_branch_date) - - print_overview(commits, branch_commits) - start_time = time.time() - - for idx, commit in enumerate(reversed(commits), 1): - phase = 'main' - print_commit_info(phase, idx, commit, start_time, verbose) - self.analyze_master_commit(commit) - self.autosave(phase, idx, checkpoint_interval) - - for idx, commit in enumerate(branch_commits, 1): - phase = 'branch' - print_commit_info(phase, idx, commit, start_time, verbose) - self.analyze_branch_commit(commit) - self.autosave(phase, idx, checkpoint_interval) - - self.autosave('finished', 0, 1) - - def _analyze_commit(self, commit, ccg_func): - sha = commit.hexsha - self.ordered_shas.append(sha) - self.history[sha] = {} - self.id_map[sha] = {} - diff_index = _diff_with_first_parent(commit) - - for diff in diff_index: - old_fname, new_fname = _get_fnames(diff) - # Cases we don't handle - # 1. Both file names are None - if old_fname is None and new_fname is None: - print('WARNING: unknown change type encountered.') - continue - - # 2. 
Either old_fname and new_fname doesn't pass filter - if ((old_fname and not self.graph_server.filter_file(old_fname)) or - (new_fname and not self.graph_server.filter_file(new_fname))): - continue - - old_src = new_src = None - - if old_fname: - old_src = get_contents( - self.ri.repo, commit.parents[0], old_fname) - - if new_fname: - new_src = get_contents(self.ri.repo, commit, new_fname) - - if old_src or new_src: - # Delegate actual work to graph_server - id_to_lines, id_map = ccg_func( - old_fname, old_src, new_fname, new_src, diff.diff) - - self.history[sha].update(id_to_lines) - self.id_map[sha].update(id_map) - - def analyze_master_commit(self, commit): - self._analyze_commit(commit, self.graph_server.update_graph) - - def analyze_branch_commit(self, commit): - self._analyze_commit(commit, self.graph_server.parse) - - def reset_state(self): - self.history = {} - self.id_map = {} - self.ordered_shas = [] - self.graph = None - - def build_history(self, - commits, - phase='build-history', - checkpoint_interval=1000, - verbose=False): - """A helper function to access `analyze_branch_commit`""" - print_overview([], commits) - start_time = time.time() - - for idx, commit in enumerate(commits, 1): - print_commit_info(phase, idx, commit, start_time, verbose) - self.analyze_branch_commit(commit) - self.autosave(phase, idx, checkpoint_interval) - - self.autosave(phase, 0, 1) - - def aggregate_id_map(self): - final_map = bidict() - for sha in self.ordered_shas: - for old_fid, new_fid in self.id_map[sha].items(): - if old_fid in final_map.inverse: - # Make a copy so as not to remove list elements during iteration - existing_fids = final_map.inverse[old_fid].copy() - for ex_fid in existing_fids: - final_map[ex_fid] = new_fid - final_map[old_fid] = new_fid - return dict(final_map) - - def cache_graph(self): - self.graph = self.graph_server.get_graph() - - def compute_function_share(self, alpha): - self.cache_graph() - return devrank(self.graph, alpha=alpha) - - def 
compute_commit_share(self, alpha): - commit_share = {} - func_share = self.compute_function_share(alpha) - final_map = self.aggregate_id_map() - - # Compute final history using final_map - final_history = {} - for sha in self.history: - final_history[sha] = {} - for fid, num_lines in self.history[sha].items(): - if fid in final_map: - final_history[sha][final_map[fid]] = num_lines - else: - final_history[sha][fid] = num_lines - - # add edits by each commit up to compute total edits - total_edits = {} - for sha in final_history: - for fid, num_lines in final_history[sha].items(): - if fid in total_edits: - total_edits[fid] += num_lines - else: - total_edits[fid] = num_lines - - # Propagate to commit level - for sha in final_history: - commit_share[sha] = 0 - for fid in final_history[sha]: - if fid in func_share: - commit_share[sha] += \ - (final_history[sha][fid] / total_edits[fid] * - func_share[fid]) - - return commit_share - - def compute_developer_share(self, alpha): - dev_share = {} - commit_share = self.compute_commit_share(alpha) - - for sha in commit_share: - email = self.ri.repo.commit(sha).author.email - if email in dev_share: - dev_share[email] += commit_share[sha] - else: - dev_share[email] = commit_share[sha] - return dev_share - - def locrank_commits(self): - loc = {} - for sha in self.history: - loc[sha] = 0 - for func in self.history[sha]: - loc[sha] += self.history[sha][func] - return sorted(loc.items(), key=lambda x: x[1], reverse=True) - - def save(self, fname): - with open(fname, 'wb+') as f: - pickle.dump(self, f) - - def autosave(self, phase, idx, checkpoint_interval): - if idx % checkpoint_interval == 0: - repo_name = os.path.basename(self.ri.repo_path.rstrip('/')) - fname = repo_name + '-' + phase + '-' + str(idx) + '.pickle' - self.save(fname) diff --git a/persper/graphs/c.py b/persper/graphs/c.py deleted file mode 100644 index 787fc29b732..00000000000 --- a/persper/graphs/c.py +++ /dev/null @@ -1,108 +0,0 @@ -import re -import networkx as 
nx -from persper.graphs.patch_parser import PatchParser -from persper.graphs.srcml import transform_src_to_tree -from persper.graphs.call_graph.c import update_call_graph_c, get_func_ranges_c -from persper.graphs.detect_change import get_changed_functions -from persper.graphs.inverse_diff import inverse_diff -from persper.graphs.graph_server import GraphServer - - -class CGraphServer(GraphServer): - def __init__(self, filename_regex_strs): - self._graph = nx.DiGraph() - self._filename_regexes = [re.compile(regex_str) for regex_str in filename_regex_strs] - self._pparser = PatchParser() - - def update_graph(self, old_filename, old_src, new_filename, new_src, patch): - # on add, rename, modify: update_roots = [new_root] - # on delete: update_roots = [] - update_root = [] - - # on add: modified_func = {} - # on rename, modify, delete: modified_func is computed by - # parsing patch and call get_changed_functions - modified_func = {} - - if old_src is not None: - old_root = transform_src_to_tree(old_src) - if old_root is None: - return {}, {} - - modified_func = get_changed_functions( - *get_func_ranges_c(old_root), - *self._parse_patch(patch)) - - if new_src is not None: - new_root = transform_src_to_tree(new_src) - if new_root is None: - return {}, {} - update_root = [new_root] - - # update call graph - # if on delete, then new_func is expected to be an empty dict - new_func = update_call_graph_c(self.graph, update_root, modified_func) - - # return history - return {**new_func, **modified_func}, {} - - def parse(self, old_filename, old_src, new_filename, new_src, patch): - """Return None if there is an error""" - forward_stats = {} - bckward_stats = {} - - adds, dels = self._parse_patch(patch) - if adds is None or dels is None: - return None, {} - - if old_src is not None: - old_root = transform_src_to_tree(old_src) - if old_root is None: - return None, {} - - forward_stats = get_changed_functions( - *get_func_ranges_c(old_root), adds, dels) - - if new_src is not 
None: - inv_adds, inv_dels = inverse_diff(adds, dels) - new_root = transform_src_to_tree(new_src) - if new_root is None: - return None, {} - - bckward_stats = get_changed_functions( - *get_func_ranges_c(new_root), inv_adds, inv_dels) - - """ - forward_stats and bckward_stats might have different values - for the same function, as an example, please refer to - `str_equals` function in the following link. In this case, - we'll stick with forward_stats (override bckward_stats). - https://github.com/UltimateBeaver/test_feature_branch/commit/364d5cc49aeb2e354da458924ce84c0ab731ac77 - """ - bckward_stats.update(forward_stats) - return bckward_stats, {} - - def get_graph(self): - return self.graph - - def reset_graph(self): - self.graph = nx.DiGraph() - - def filter_file(self, filename): - for regex in self._filename_regexes: - if not regex.match(filename): - return False - return True - - def config(self, param): - pass - - def _parse_patch(self, patch): - adds, dels = None, None - try: - adds, dels = self._pparser.parse(patch.decode('utf-8', 'replace')) - except UnicodeDecodeError: - print("UnicodeDecodeError when parsing patch!") - except: - print("Unknown error when parsing patch!") - return adds, dels diff --git a/persper/graphs/call_commit_graph.py b/persper/graphs/call_commit_graph.py deleted file mode 100644 index 691385d9787..00000000000 --- a/persper/graphs/call_commit_graph.py +++ /dev/null @@ -1,368 +0,0 @@ -import sys -import networkx as nx -from persper.graphs.processor import Processor, _diff_with_first_parent, _fill_change_type -from persper.graphs.patch_parser import PatchParser -from persper.graphs.srcml import transform_src_to_tree -from persper.graphs.detect_change import get_changed_functions -from persper.graphs.call_graph.c import update_call_graph_c, get_func_ranges_c -from persper.graphs.call_graph.java import update_call_graph_java, get_func_ranges_java -from persper.graphs.call_graph.java import prepare_env -from persper.graphs.devrank import 
devrank -from persper.graphs.git_tools import get_contents - - -def _inverse_diff_result(adds, dels): - """ - >>> adds = [[11, 1], [32, 1]] - >>> dels = [[11, 11], [31, 32]] - >>> _inverse_diff_result(adds, dels) - ([[10, 1], [30, 2]], [[11, 11], [31, 31]]) - """ - diff = 0 - add_ptr, del_ptr = 0, 0 - num_adds, num_dels = len(adds), len(dels) - inv_adds, inv_dels = [], [] - - def _handle_a(a): - nonlocal diff - inv_dels.append([diff + a[0] + 1, diff + a[0] + a[1]]) - diff += a[1] - - def _handle_d(d): - nonlocal diff - inv_adds.append([diff + d[0] - 1, d[1] - d[0] + 1]) - diff -= (d[1] - d[0] + 1) - - while add_ptr < num_adds or del_ptr < num_dels: - if add_ptr < num_adds and del_ptr < num_dels: - if adds[add_ptr][0] < dels[del_ptr][0]: - _handle_a(adds[add_ptr]) - add_ptr += 1 - else: - _handle_d(dels[del_ptr]) - del_ptr += 1 - elif add_ptr < num_adds and del_ptr >= num_dels: - # we have finished dels - _handle_a(adds[add_ptr]) - add_ptr += 1 - else: - # we have finished adds - _handle_d(dels[del_ptr]) - del_ptr += 1 - - return inv_adds, inv_dels - - -def _normalize_shares(email_to_share): - share_sum = 0 - for email, share in email_to_share.items(): - share_sum += share - - for email in email_to_share: - email_to_share[email] /= share_sum - - -class CallCommitGraph(Processor): - - def __init__(self, repo_path, lang='c'): - super().__init__(repo_path) - self.G = None - self.lang = lang - if lang == 'c': - self.exts = ('.c', '.h') - elif lang == 'java': - self.exts = ('.java',) - else: - print("Invalid language option, terminated.") - sys.exit(-1) - self.env = {} - self.history = {} - self.share = {} - self.patch_parser = PatchParser() - - def _reset_state(self): - super()._reset_state() - self.G = nx.DiGraph() - self.history = {} - - def _start_process_commit(self, commit): - self.history[commit.hexsha] = {} - if self.lang == 'java': - new_roots = [] - diff_index = _diff_with_first_parent(commit) - _fill_change_type(diff_index) - for diff in diff_index: - if 
diff.change_type in ['A', 'M']: - fname = diff.b_blob.path - elif diff.change_type == 'R': - fname = diff.rename_to - else: - continue - - if self.fname_filter(fname): - root = self._get_xml_root(commit, fname) - prepare_env(root, env=self.env) - - def on_add(self, diff, commit, is_merge_commit): - old_fname = None - new_fname = diff.b_blob.path - return self._first_phase(diff, commit, - is_merge_commit, - old_fname=old_fname, - new_fname=new_fname) - - def on_delete(self, diff, commit, is_merge_commit): - old_fname = diff.a_blob.path - new_fname = None - return self._first_phase(diff, commit, - is_merge_commit, - old_fname=old_fname, - new_fname=new_fname) - - def on_rename(self, diff, commit, is_merge_commit): - new_fname = diff.rename_to - old_fname = diff.rename_from - return self._first_phase(diff, commit, - is_merge_commit, - old_fname=old_fname, - new_fname=new_fname) - - def on_modify(self, diff, commit, is_merge_commit): - fname = diff.b_blob.path - return self._first_phase(diff, commit, - is_merge_commit, - old_fname=fname, - new_fname=fname) - - def on_add2(self, diff, commit): - old_fname = None - new_fname = diff.b_blob.path - return self._second_phase(diff, commit, - old_fname=old_fname, - new_fname=new_fname) - - def on_delete2(self, diff, commit): - old_fname = diff.a_blob.path - new_fname = None - return self._second_phase(diff, commit, - old_fname=old_fname, - new_fname=new_fname) - - def on_rename2(self, diff, commit): - new_fname = diff.rename_to - old_fname = diff.rename_from - return self._second_phase(diff, commit, - old_fname=old_fname, - new_fname=new_fname) - - def on_modify2(self, diff, commit): - fname = diff.b_blob.path - return self._second_phase(diff, commit, - old_fname=fname, - new_fname=fname) - - def fname_filter(self, fname): - for ext in self.exts: - if fname.endswith(ext): - return True - return False - - def _get_xml_root(self, commit, fname): - if self.lang == 'c': - return transform_src_to_tree( - get_contents(self.repo, 
commit, fname)) - elif self.lang == 'java': - return transform_src_to_tree( - get_contents(self.repo, commit, fname), ext='.java') - - def _first_phase(self, diff, commit, is_merge_commit, - old_fname=None, new_fname=None): - - if ((old_fname and self.fname_filter(old_fname)) or - (new_fname and self.fname_filter(new_fname))): - - # on add, rename, modify: update_roots = [new_root] - # on delete: update_roots = [] - update_roots = [] - - # on add: modified_func = {} - # on rename, modify, delete: modified_func is computed by - # parsing patch and call get_changed_functions - modified_func = {} - - # do not need to parse patch if on add - if old_fname is not None: - additions, deletions = self.parse_patch(diff.diff) - if additions is None or deletions is None: - return -1 - - old_root = self._get_xml_root(commit.parents[0], old_fname) - if old_root is None: - return -1 - - if self.lang == 'c': - modified_func = get_changed_functions( - *get_func_ranges_c(old_root), additions, deletions) - elif self.lang == 'java': - modified_func = get_changed_functions( - *get_func_ranges_java(old_root), additions, deletions) - - # parse new src to tree - if new_fname is not None: - new_root = self._get_xml_root(commit, new_fname) - if new_root is None: - return -1 - update_roots.append(new_root) - - # update call graph - # if on delete, then new_func is expected to be an empty dict - if self.lang == 'c': - new_func = update_call_graph_c( - self.G, update_roots, modified_func) - elif self.lang == 'java': - new_func = update_call_graph_java( - self.G, update_roots, modified_func, env=self.env) - - # only update self.history for non-merge commit - if not is_merge_commit: - for func_name in new_func: - self.history[commit.hexsha][func_name] = \ - new_func[func_name] - - for func_name in modified_func: - self.history[commit.hexsha][func_name] = \ - modified_func[func_name] - - return 0 - - def _second_phase(self, diff, commit, old_fname=None, new_fname=None): - - if ((old_fname and 
self.fname_filter(old_fname)) or - (new_fname and self.fname_filter(new_fname))): - - adds, dels = self.parse_patch(diff.diff) - if adds is None or dels is None: - return -1 - modified_func, inv_modified_func = {}, {} - - if old_fname is not None: - old_root = self._get_xml_root(commit.parents[0], old_fname) - if old_root is None: - return -1 - - if self.lang == 'c': - modified_func = get_changed_functions( - *get_func_ranges_c(old_root), adds, dels) - elif self.lang == 'java': - modified_func = get_changed_functions( - *get_func_ranges_java(old_root), adds, dels) - - if new_fname is not None: - inv_adds, inv_dels = _inverse_diff_result(adds, dels) - new_root = self._get_xml_root(commit, new_fname) - if new_root is None: - return -1 - - if self.lang == 'c': - inv_modified_func = get_changed_functions( - *get_func_ranges_c(new_root), inv_adds, inv_dels) - elif self.lang == 'java': - inv_modified_func = get_changed_functions( - *get_func_ranges_java(new_root), inv_adds, inv_dels) - - for func_name in modified_func: - if func_name in self.G: - self.history[commit.hexsha][func_name] = \ - modified_func[func_name] - - for func_name in inv_modified_func: - if func_name in self.G and func_name not in modified_func: - self.history[commit.hexsha][func_name] = \ - inv_modified_func[func_name] - - def parse_patch(self, patch): - additions, deletions = None, None - try: - additions, deletions = self.patch_parser.parse( - patch.decode('utf-8', 'replace')) - except UnicodeDecodeError: - print("UnicodeDecodeError in function parse_patch!") - except: - print("Unknown error in function parse_patch!") - return additions, deletions - - def update_shares(self, alpha): - self.scores = devrank(self.G, alpha=alpha) - for sha in self.history: - self.share[sha] = 0 - for func_name in self.history[sha]: - if func_name in self.G: - # this condition handles the case where - # func_name is deleted by sha, - # but has never been added or modified before - self.share[sha] += \ - 
(self.history[sha][func_name] / - self.G.node[func_name]['num_lines']) \ - * self.scores[func_name] - - def devrank_commits(self, alpha): - self.update_shares(alpha) - return sorted(self.share.items(), key=lambda x: x[1], reverse=True) - - def devrank_functions(self, alpha): - self.scores = devrank(self.G, alpha=alpha) - return sorted(self.scores.items(), key=lambda x: x[1], reverse=True) - - def devrank_developers(self, alpha, sha_to_type={}, coefs=[1, 1, 1, 1]): - self.update_shares(alpha) - email_to_share = {} - email_to_name = {} - - hexsha_to_type = {} - for sha, t in sha_to_type.items(): - c = self.repo.commit(sha) - hexsha_to_type[c.hexsha] = t - - for sha in self.history: - if sha in hexsha_to_type: - coef = coefs[int(hexsha_to_type[sha])] - else: - coef = 1 - actor = self.repo.commit(sha).author - email = actor.email - email_to_name[email] = actor.name - if email in email_to_share: - email_to_share[email] += coef * self.share[sha] - else: - email_to_share[email] = coef * self.share[sha] - _normalize_shares(email_to_share) - sorted_shares = sorted(email_to_share.items(), - key=lambda x: x[1], - reverse=True) - return sorted_shares, email_to_name - - def locrank_commits(self): - self.loc = {} - for sha in self.history: - self.loc[sha] = 0 - for func_name in self.history[sha]: - self.loc[sha] += self.history[sha][func_name] - return sorted(self.loc.items(), key=lambda x: x[1], reverse=True) - - def __getstate__(self): - state = super().__getstate__() - state['G'] = self.G - state['history'] = self.history - state['lang'] = self.lang - state['exts'] = self.exts - state['env'] = self.env - return state - - def __setstate__(self, state): - super().__setstate__(state) - self.share = {} - self.patch_parser = PatchParser() - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/persper/graphs/call_graph/c.py b/persper/graphs/call_graph/c.py deleted file mode 100644 index 50ef6604d76..00000000000 --- a/persper/graphs/call_graph/c.py +++ 
/dev/null @@ -1,150 +0,0 @@ -import networkx as nx -from persper.graphs.call_graph.utils import remove_edges_of_node, ns, line_attr - - -class NotFunctionCallError(Exception): - """Raise for false positive nodes""" - - -def handle_function(func_node): - """Given a node, - return function name and function range (start & end lineno)""" - - name_node = func_node.find('srcml:name', ns) - func_name, start_line = handle_name(name_node) - if not func_name or not start_line: - print('Function name/start not found!') # very unlikely to happen - return None, None, None - - block_node = func_node.find('srcml:block', ns) - if block_node is None: - try: - block_node = func_node.xpath('./following-sibling::srcml:block', - namespaces=ns)[0] - except: - print("Block node not found (in func {})".format(func_name)) - return func_name, None, None - try: - pos_node = block_node.find('pos:position', ns) - end_line = int(pos_node.attrib[line_attr]) - except: - print("Block node doesn't have position node inside!") - return func_name, None, None - - return func_name, start_line, end_line - - -def handle_name(name_node): - """Given an node, - return its text content and position (line)""" - text, line = None, None - if name_node is not None: - text = name_node.text - line = int(name_node.attrib[line_attr]) - return text, line - - -def handle_call(call_node): - """Given an node, return function name being called - - Throws NotFunctionCallException - - Case 1: casting function pointer is not function call - Example: tmp.sa_handler = (void (*)(int)) handler; - - Case 2: function call from struct variable - Example: tty->write(tty) - - """ - name_node = call_node.find('srcml:name', ns) - if name_node is None: - # Case 1 - raise NotFunctionCallError() - callee_name = name_node.text - if callee_name is None: - # Case 2 - callee_name = name_node[-1].text - return callee_name - - -def build_call_graph_c(roots, G=None): - if G is None: - G = nx.DiGraph() - - new_func = {} - func_to_file = {} - 
for root in roots: - # print('------ ' + root.attrib['filename'] + ' ------') - - for func_node in root.findall('./srcml:function', namespaces=ns): - - caller_name, start_line, end_line = handle_function(func_node) - if not caller_name: - continue - - if start_line and end_line: - num_lines = end_line - start_line + 1 - else: - # default num_lines is 1 - num_lines = 1 - - if caller_name not in G: - # Case 1: hasn't been defined and hasn't been called - new_func[caller_name] = num_lines - G.add_node(caller_name, num_lines=num_lines, defined=True) - elif not G.node[caller_name]['defined']: - # Case 2: has been called but hasn't been defined - new_func[caller_name] = num_lines - G.node[caller_name]['defined'] = True - G.node[caller_name]['num_lines'] = num_lines - else: - # Case 3: has been called and has been defined - # it is modified in the latest commit - # pass because it's not a new function - # so no need to add it to new_func and to - # update G.node[caller_name]['num_lines'] - pass - - func_to_file[caller_name] = root.attrib['filename'] - - # handle all function calls - for call_node in func_node.xpath('.//srcml:call', namespaces=ns): - - try: - callee_name = handle_call(call_node) - except NotFunctionCallError: - continue - except: - print("Callee name not found! 
(in func %s)" % caller_name) - continue - - if callee_name not in G: - G.add_node(callee_name, num_lines=1, defined=False) - G.add_edge(caller_name, callee_name) - - return G, new_func, func_to_file - - -def update_call_graph_c(G, roots, modified_func): - for func_name in modified_func: - if func_name in G: - remove_edges_of_node(G, func_name, in_edges=False) - G.node[func_name]['num_lines'] += modified_func[func_name] - - # here roots should be constructed from the more recent commit - # new functions and their sizes are stored in new_func dictionary - _, new_func, _ = build_call_graph_c(roots, G) - return new_func - - -def get_func_ranges_c(root): - func_names, func_ranges = [], [] - for func_node in root.findall('./srcml:function', namespaces=ns): - - func_name, start_line, end_line = handle_function(func_node) - if not (func_name and start_line and end_line): - continue - - func_ranges.append([start_line, end_line]) - func_names.append(func_name) - return func_names, func_ranges diff --git a/persper/graphs/call_graph/cpp.py b/persper/graphs/call_graph/cpp.py deleted file mode 100644 index 716146003bc..00000000000 --- a/persper/graphs/call_graph/cpp.py +++ /dev/null @@ -1,51 +0,0 @@ - -ns = {'srcml': 'http://www.srcML.org/srcML/src', 'pos': 'http://www.srcML.org/srcML/position'} - -def get_func_ranges_cpp(root): - func_ranges, func_names = [], [] - for func_node in root.xpath('./srcml:constructor | ./srcml:function', namespaces=ns): - - func_name, start_line, end_line = handle_function(func_node) - if not (func_name and start_line and end_line): - continue - - func_ranges.append([start_line, end_line]) - func_names.append(func_name) - return func_names, func_ranges - -def handle_name(name_node): - func_id, line = None, None - if name_node != None: - if name_node.text: - func_id = name_node.text - line = int(name_node.attrib['{http://www.srcML.org/srcML/position}line']) - else: - try: - # alternative solution is to use - # 
graphs.call_graph.utils.transform_node_to_src - class_name = name_node[0].text - line = int(name_node[0].attrib['{http://www.srcML.org/srcML/position}line']) - assert(name_node[1].text == "::") - func_name = name_node[2].text - func_id = "{}::{}".format(class_name, func_name) - except: - import pdb - pdb.set_trace() - return func_id, line - -def handle_function(func_node): - - name_node = func_node.find('srcml:name', ns) - func_id, start_line = handle_name(name_node) - if not func_id or not start_line: - print('Function name/start not found!') - return None, None, None - - block_node = func_node.find('srcml:block', ns) - try: - pos_node = block_node.find('pos:position', ns) - end_line = int(pos_node.attrib['{http://www.srcML.org/srcML/position}line']) - except: - return func_id, None, None - - return func_id, start_line, end_line diff --git a/persper/graphs/call_graph/java.py b/persper/graphs/call_graph/java.py deleted file mode 100644 index 9b06f49d53b..00000000000 --- a/persper/graphs/call_graph/java.py +++ /dev/null @@ -1,411 +0,0 @@ -import networkx as nx -from lxml import etree -from persper.graphs.call_graph.utils import transform_node_to_src, remove_edges_of_node -from persper.graphs.call_graph.utils import ns, line_attr - - -def generate_fid(class_name, func_name): - return class_name + ':' + func_name - - -def decompose_fid(func_id): - return func_id.split(':') - - -def get_specifiers(node): - """Helper function to first find all specifier nodes - and then return their texts""" - return [n.text for n in node.findall('./srcml:specifier', ns)] - - -def handle_name_node(name_node): - return transform_node_to_src(name_node).strip() - - """ - child_nodes = name_node.getchildren() - text_of_itself = name_node.text or '' - if len(child_nodes) == 0: - return text_of_itself - else: - child_names = [] - for child_node in child_nodes: - child_names.append(handle_name_node(child_node)) - return text_of_itself + ''.join(child_names) - """ - - -def get_name(node): - 
"""Helper function to first find name node and then parse name""" - return handle_name_node(node.find('srcml:name', ns)) - - -def get_type(node): - """First get type node, then get type node's name node, - finally returns node's type""" - type_node = node.find('srcml:type', ns) - return handle_name_node(type_node.find('srcml:name', ns)) - - -def handle_decl_node(decl_node): - type_node = decl_node.find('srcml:type', ns) - type_name_node = type_node.find('srcml:name', ns) - name_node = decl_node.find('srcml:name', ns) - - try: - type_name = handle_name_node(type_name_node) - var_name = handle_name_node(name_node) - except: - import pdb - pdb.set_trace() - - return type_name, var_name - - -def handle_decl_stmt_node(decl_stmt_node, local_env): - """ - Node Structure: - A node consists of one or more nodes, - each has a node and a node. - The node may or may not has a node, the following - declaration statement is an example: - - int c, char2, char3; - """ - prev_type = None - decl_nodes = decl_stmt_node.findall('./srcml:decl', ns) - for decl_node in decl_nodes: - type_node = decl_node.find('./srcml:type', ns) - type_name_node = type_node.find('./srcml:name', ns) - if type_name_node is None: - type_name = prev_type - else: - type_name = handle_name_node(type_name_node) - var_name = get_name(decl_node) - local_env[var_name] = type_name - prev_type = type_name - - -def handle_call_node(call_node, cl_name, local_env, env): - """Parse a call node and return the identifer of the function being called - Type of calls we handle: - Case 1: doSomething(args) - doSomething is a public/private static/instance - member method of cl_name - Case 2: A a = new A() - A is a class (env), A's constructor function is called in this case - Case 3: a.doSomething(args) - a is an object, could be newly instantiated in this - function (local_env), - or could be passed as a parameter (local_env), - or could be this class's public/private member variable (env) - doSomething could be either a 
static method or a instance method - Case 4: A.doSomething(args) - A is a class (env) - doSomething is one of A's static methods - Case 5: A.var.doSomething(args) - A is a class (env) - var is a public static member of class A (env) - Case 6: a.var.doSomething(args) - a is an object, could be newly instantiated in this - function (local_env), - or could be passed as a parameter (local_env), - or could be this class' public/private member variable (env) - var is a public (static) member of object a (local_env & env) - - Returns: - A String representing the signature of the function being called - """ - call_name = get_name(call_node) - - names_lst = [n.strip() for n in call_name.split('.')] - callee_func_name = names_lst[-1] - if len(names_lst) == 1: - previous_node = call_node.getprevious() - if previous_node is not None and previous_node.text == 'new': - # Case 1: calling constructor - callee_cl_name = callee_func_name - else: - # Case 2: calling member method - callee_cl_name = cl_name - return generate_fid(callee_cl_name, callee_func_name) - elif len(names_lst) == 2: - niq = names_lst[0] # niq => name in question - # check local_env first - if niq in local_env: - # Case 3 (local_env) - var_name = niq - callee_cl_name = local_env[var_name] - return generate_fid(callee_cl_name, callee_func_name) - elif niq in env[cl_name]['var']: - # case 3 (env) - var_name = niq - callee_cl_name = env[cl_name]['var'][var_name]['type'] - return generate_fid(callee_cl_name, callee_func_name) - elif niq in env: - # Case 4 - return generate_fid(niq, callee_func_name) - else: - # something went wrong, niq is probably a class not in env - # print("WARNING: niq not found in both env and local_env") - return generate_fid(niq, callee_func_name) - else: - # Case 5 or 6 - callee_cl_name = None - if names_lst[0] in local_env: - callee_cl_name = local_env[names_lst[0]] - for n in names_lst[1:-1]: - callee_cl_name = env[callee_cl_name]['var'][n]['type'] - return generate_fid(callee_cl_name, 
callee_func_name) - elif names_lst[0] in env[cl_name]['var']: - callee_cl_name = env[cl_name]['var'][names_lst[0]]['type'] - for n in names_lst[1:-1]: - callee_cl_name = env[callee_cl_name]['var'][n]['type'] - return generate_fid(callee_cl_name, callee_func_name) - elif names_lst[0] in env: - callee_cl_name = names_lst[0] - for n in names_lst[1:-1]: - callee_cl_name = env[callee_cl_name]['var'][n]['type'] - return generate_fid(callee_cl_name, callee_func_name) - else: - # something went wrong, names_lst[0] is probably a class not in env - # print("WARNING: names_lst[0] not found in both env and local_env") - approx_callee_cl_name = '.'.join(names_lst[:-1]) - return generate_fid(approx_callee_cl_name, callee_func_name) - - -def handle_param_lst_node(param_lst_node): - local_env = {} - param_nodes = param_lst_node.findall('./srcml:parameter', ns) - for param_node in param_nodes: - decl_node = param_node.find('./srcml:decl', ns) - type_name, var_name = handle_decl_node(decl_node) - local_env[var_name] = type_name - return local_env - - -def handle_func_node(func_node, class_name, G, new_func, fid_to_file, env): - """ - Args: - class_name: A String, name of the class this function belongs to - G: A nx.DiGraph object, storing the actual call graph - new_func: A dictionary, mapping a new function's identifer (fid) - to its size - fid_to_file: A dictionary, mapping fid to the file it belongs to - env: A dictionary, storing global environment - - Workflow Summary: - 1. Parse function name and generate fid - 2. Add caller function to call graph G - 3. Initialize local_env by parsing parameter list - 4. Iterate through subnodes of this function in document order - a. For node, parse it and get callee_fid, - add this new edge to call graph G - b. For node, parse it and update local_env - - Node Structure: - node's direct children include , , - , - - TODOs: - 1. Function Overload - a. Primitive type - 2. Polymorphism - 3. Collection - 4. Array - 5. 
Add logic to remove variable from local_env - 6. Nested class - 7. Anonymous class - """ - name_node = func_node.find('./srcml:name', ns) - block_node = func_node.find('./srcml:block', ns) - block_pos_node = block_node.find('./pos:position', ns) - if block_pos_node is None: - # probably a srcML parsing error - return - param_lst_node = func_node.find('./srcml:parameter_list', ns) - - func_name = get_name(func_node) - caller_fid = generate_fid(class_name, func_name) - start_line = int(name_node.attrib[line_attr]) - end_line = int(block_pos_node.attrib[line_attr]) - num_lines = end_line - start_line + 1 - - # local_env maps variable name to class name - try: - local_env = handle_param_lst_node(param_lst_node) - except: - print("Failed to parse parameter list for %s" % caller_fid) - return - - if caller_fid not in G: - # Case 1: hasn't been defined and hasn't been called - new_func[caller_fid] = num_lines - G.add_node(caller_fid, {'num_lines': num_lines, 'defined': True}) - elif not G.node[caller_fid]['defined']: - # Case 2: has been called but hasn't been defined - new_func[caller_fid] = num_lines - G.node[caller_fid]['defined'] = True - G.node[caller_fid]['num_lines'] = num_lines - else: - # Case 3: has been called and has been defined - # it is modified in the latest commit - # no need to add it to new_func or - # update G.node[caller_fid]['num_lines'] - pass - - fid_to_file[caller_fid] = env[class_name]['filename'] - - for node in block_node.iter('{*}call', '{*}decl_stmt'): - if 'call' in node.tag: - try: - callee_fid = handle_call_node(node, class_name, local_env, env) - except: - print("Excpetion in handle_call_node.") - continue - if callee_fid not in G: - G.add_node(callee_fid, {'num_lines': 1, 'defined': False}) - G.add_edge(caller_fid, callee_fid) - else: - handle_decl_stmt_node(node, local_env) - - -def handle_class_node(class_node, G, new_func, fid_to_file, env): - class_name = get_name(class_node) - - block_node = class_node.find('./srcml:block', ns) - 
func_nodes = block_node.findall('./srcml:function', ns) - for func_node in func_nodes: - handle_func_node(func_node, class_name, G, new_func, fid_to_file, env) - - -def prepare_env_class(class_node, env): - """ - Official Access Level Tutorial: - https://docs.oracle.com/javase/tutorial/java/javaOO/accesscontrol.html - - Node Structure: - node can have node or node - as its direct child - - Assumptions and TODOs: - 1. We assume every class method has modifiers (package private is rare) - 2. We currently don't distinguish between `protected` and `public` - 3. We don't keep record of methods' return types and arguments' types - """ - class_name = get_name(class_node) - filename = class_node.getparent().attrib['filename'] - # Class members are made of 2 things: - # 1. class's variable - # 2. class's methods - cl_env = {'var': {}, 'method': {}, 'filename': filename} - env[class_name] = cl_env - - # `this` and `super` - cl_env['var']['this'] = {'is_public': False, - 'is_static': False, - 'type': class_name} - super_node = class_node.find('./srcml:super', ns) - if super_node is not None: - extends_node = super_node.find('./srcml:extends', ns) - if extends_node is not None: - super_cl_name = get_name(extends_node) - cl_env['var']['super'] = {'is_public': False, - 'is_static': True, - 'type': super_cl_name} - - block_node = class_node.find('./srcml:block', ns) - - # member variables - decl_stmt_nodes = block_node.findall('./srcml:decl_stmt', ns) - for decl_stmt_node in decl_stmt_nodes: - decl_node = decl_stmt_node.find('./srcml:decl', ns) - var_name = get_name(decl_node) - var_type = get_type(decl_node) - specifiers = get_specifiers(decl_node) - is_public = 'protected' in specifiers or 'public' in specifiers - is_static = 'static' in specifiers - cl_env['var'][var_name] = {'is_public': is_public, - 'is_static': is_static, - 'type': var_type} - - # member methods - func_nodes = block_node.findall('./srcml:function', ns) - for func_node in func_nodes: - func_name = 
get_name(func_node) - specifiers = get_specifiers(func_node) - is_public = 'protected' in specifiers or 'public' in specifiers - is_static = 'static' in specifiers - cl_env['method'][func_name] = {'is_public': is_public, - 'is_static': is_static} - - -def prepare_env(root, env=None): - """ - env: class_name => [var/method] => [var_name/method_name] - """ - if env is None: - env = {} - - class_nodes = root.findall('./srcml:class', ns) - for class_node in class_nodes: - prepare_env_class(class_node, env) - return env - - -def build_call_graph_java(roots, G=None, env=None): - if G is None: - G = nx.DiGraph() - - new_func = {} - fid_to_file = {} - - # Initialize global environment - for root in roots: - env = prepare_env(root, env=env) - - # Build call graph - for root in roots: - class_nodes = root.xpath('./srcml:class', namespaces=ns) - for class_node in class_nodes: - handle_class_node(class_node, G, new_func, fid_to_file, env) - return G, new_func, fid_to_file, env - - -def update_call_graph_java(G, roots, modified_func, env=None): - for fid in modified_func: - if fid in G: - remove_edges_of_node(G, fid, in_edges=False) - G.node[fid]['num_lines'] += modified_func[fid] - - # here roots should be constructed from the more recent commit - # new functions and their sizes are stored in new_func dictionary - _, new_func, _, _ = build_call_graph_java(roots, G, env=env) - return new_func - - -def get_func_ranges_java(root): - fids, func_ranges = [], [] - for class_node in root.xpath('.//srcml:class', namespaces=ns): - try: - class_name = get_name(class_node) - except: - print("Class doesn't have name.") - continue - - block_node = class_node.find('./srcml:block', ns) - for func_node in block_node.findall('./srcml:function', ns): - try: - func_name = get_name(func_node) - fid = generate_fid(class_name, func_name) - - name_node = func_node.find('./srcml:name', ns) - block_node = func_node.find('./srcml:block', ns) - block_pos_node = block_node.find('./pos:position', ns) - 
start_line = int(name_node.attrib[line_attr]) - end_line = int(block_pos_node.attrib[line_attr]) - except: - continue - - fids.append(fid) - func_ranges.append([start_line, end_line]) - return fids, func_ranges diff --git a/persper/graphs/call_graph/utils.py b/persper/graphs/call_graph/utils.py deleted file mode 100644 index 32d951967af..00000000000 --- a/persper/graphs/call_graph/utils.py +++ /dev/null @@ -1,45 +0,0 @@ -ns = {'srcml': 'http://www.srcML.org/srcML/src', - 'pos': 'http://www.srcML.org/srcML/position'} - -line_attr = '{http://www.srcML.org/srcML/position}line' - - -def transform_node_to_src(node, s=None): - """Print out the source code of a xml node""" - if s is None: - s = "" - if node.text: - s += node.text - for child in node: - s = transform_node_to_src(child, s) - if node.tail: - s += node.tail - return s - - -def remove_edges_of_node(G, n, in_edges=True, out_edges=True): - """Remove edges of n, but keep the node itself in the graph - - >>> G3 = nx.DiGraph() - >>> G3.add_path([0, 1, 2, 3, 4]) - >>> remove_edges_of_node(G3, 2) - >>> G3.nodes() - [0, 1, 2, 3, 4] - >>> G3.edges() - [(0, 1), (3, 4)] - - """ - try: - nbrs = G._succ[n] - except KeyError: # NetworkXError if not in self - # raise NetworkXError("The node %s is not in the digraph."%(n, )) - print("The node %s is not in the digraph." 
% n) - return - if out_edges: - for u in nbrs: - del G._pred[u][n] - G._succ[n] = {} - if in_edges: - for u in G._pred[n]: - del G._succ[u][n] - G._pred[n] = {} diff --git a/persper/graphs/commit_graph.py b/persper/graphs/commit_graph.py deleted file mode 100644 index db4d4016e75..00000000000 --- a/persper/graphs/commit_graph.py +++ /dev/null @@ -1,147 +0,0 @@ -from enum import Enum -import argparse -import networkx as nx -import sys -import os -import subprocess -from persper.graphs.parse_patch import parse_patch -from persper.graphs.cpp_tools import get_func_ranges_cpp, fname_filter_cpp -from persper.graphs.ruby_tools import get_func_ranges_ruby, fname_filter_ruby -from persper.graphs.git_tools import initialize_repo, get_contents -from persper.graphs.processor import Processor -from persper.graphs.write_graph_to_dot import write_G_to_dot_with_pr - -parser = argparse.ArgumentParser( - description="Draw commit graph for a git repository") -parser.add_argument('repo', type=str, - help="path to target repo") -parser.add_argument('language', type=str, - help="programming language of the target repo, currently support [cpp, ruby]") -parser.add_argument('--output', type=str, - help="output path of generated svg, default as working directory") - -class Language(Enum): - CPP = 1 - RUBY = 2 - -def add_edge(G, sp, ep, func_id): - if ep in G[sp]: - G[sp][ep]['func_ids'].append(func_id) - else: - G.add_edge(sp, ep, func_ids=[func_id]) - -def intersected(a, b): - return a[0] <= b[0] <= a[1] or b[0] <= a[0] <= b[1] - -def get_modified_func_ids(func_ranges, modified_intervals, func_ids): - search_ptr = 0 - num_intervals = len(modified_intervals) - modified_func_ids = [] - for func_r, func_id in zip(func_ranges, func_ids): - for i in range(search_ptr, num_intervals): - if intersected(func_r, modified_intervals[i]): - - modified_func_ids.append(func_id) - search_ptr = i - break - return modified_func_ids - -class CommitGraph(Processor): - - def __init__(self, repo_path, 
language_str): - super().__init__(repo_path) - language = Language[language_str] - if language == Language.CPP: - self.fname_filter = fname_filter_cpp - self.func_extractor = get_func_ranges_cpp - elif language == Language.RUBY: - self.fname_filter = fname_filter_ruby - self.func_extractor = get_func_ranges_ruby - else: - print("This language is not supported yet!") - - def start_process(self): - self.G = nx.DiGraph() - self.func_commit = {} - - def start_process_commit(self, commit): - self.G.add_node(commit.hexsha) - - def on_add(self, diff, commit): - fname = diff.b_blob.path - sha = commit.hexsha - if self.fname_filter(fname): - file_contents = get_contents(self.repo, commit, fname) - func_ids, _ = self.func_extractor(file_contents, fname) - for func_id in func_ids: - self.func_commit[func_id] = sha - - def on_delete(self, diff, commit): - fname = diff.a_blob.path - sha = commit.hexsha - if self.fname_filter(fname): - last_commit = commit.parents[0] - file_contents = get_contents(self.repo, last_commit, fname) - func_ids, _ = self.func_extractor(file_contents, fname) - for func_id in func_ids: - if func_id in self.func_commit: - add_edge(self.G, sha, self.func_commit[func_id], func_id) - del self.func_commit[func_id] - - def on_rename(self, diff, commit): - # when similarity is 100%, diff.a_blob and diff.b_blob are None, so don't use them - new_fname = diff.rename_to - old_fname = diff.rename_from - last_commit = commit.parents[0] - sha = commit.hexsha - - if self.fname_filter(new_fname) or self.fname_filter(old_fname): - file_contents = get_contents(self.repo, last_commit, old_fname) - func_ids, func_ranges = self.func_extractor(file_contents, old_fname) - try: - modified_intervals = parse_patch(diff.diff.decode("utf-8")) - except UnicodeDecodeError: - print("UnicodeDecodeError Found in change_type {}".format(diff.change_type)) - return -1 - modified_func_ids = get_modified_func_ids(func_ranges, modified_intervals, func_ids) - for func_id in modified_func_ids: 
- if func_id in self.func_commit: - add_edge(self.G, sha, self.func_commit[func_id], func_id) - self.func_commit[func_id] = sha - - def on_modify(self, diff, commit): - assert diff.b_blob.path == diff.a_blob.path - fname = diff.b_blob.path - last_commit = commit.parents[0] - sha = commit.hexsha - - if self.fname_filter(fname): - file_contents = get_contents(self.repo, last_commit, fname) - func_ids, func_ranges = self.func_extractor(file_contents, fname) - try: - modified_intervals = parse_patch(diff.diff.decode("utf-8")) - except UnicodeDecodeError: - print("UnicodeDecodeError Found in change_type {}".format(diff.change_type)) - return -1 - modified_func_ids = get_modified_func_ids(func_ranges, modified_intervals, func_ids) - - for func_id in modified_func_ids: - if func_id in self.func_commit: - add_edge(self.G, sha, self.func_commit[func_id], func_id) - self.func_commit[func_id] = sha - - -def draw_commit_graph(repo_path, language, output_path=None, num_commits=None): - repo_name = os.path.basename(repo_path) - cg = CommitGraph(repo_path, language) - cg.process(from_beginning=True, num_commits=num_commits) - pr = nx.pagerank(cg.G, alpha=0.85) - write_G_to_dot_with_pr(cg.G, pr, repo_name + ".dot", edge_attrib="func_ids") - subprocess.call('dot -Tsvg {}.dot -o {}.svg'.format(repo_name, repo_name), shell=True) - -def main(): - args = parser.parse_args() - draw_commit_graph(args['repo'], args['language']) - -if __name__ == '__main__': - main() diff --git a/persper/graphs/cpp.py b/persper/graphs/cpp.py deleted file mode 100644 index 4e6cd59696e..00000000000 --- a/persper/graphs/cpp.py +++ /dev/null @@ -1,3 +0,0 @@ -from persper.graphs.c import CGraphServer - -CPPGraphServer = CGraphServer diff --git a/persper/graphs/cpp_tools.py b/persper/graphs/cpp_tools.py deleted file mode 100644 index ed96ae8e8c8..00000000000 --- a/persper/graphs/cpp_tools.py +++ /dev/null @@ -1,38 +0,0 @@ -import re -import subprocess -from git import Repo -import networkx as nx - -from 
persper.graphs.write_graph_to_dot import write_G_to_dot_with_pr - -def get_func_ranges_cpp(src, fname): - re_signature = re.compile("""^(?P\w+(\s*[\*\&])?)\s+ - ((?P\w+)::)? - (?P\w+)\s* - \([^;]+$ - """, re.VERBOSE ) - func_ids = [] - func_ranges = [] - ptr = -1 - num_lines = 0 - for lineno, line in enumerate(src.split('\n'), 1): - num_lines += 1 - m = re_signature.search(line) - if m: - d = m.groupdict() - if d['class_name']: - func_ids.append('{}::{}'.format(d['class_name'], d['func_name'])) - else: - func_ids.append(d['func_name']) - if ptr != -1: - func_ranges.append([ptr, lineno - 1]) - ptr = lineno - if ptr != -1: - func_ranges.append([ptr, num_lines]) - - return func_ids, func_ranges - -def fname_filter_cpp(fname): - return fname.endswith('.cc') or fname.endswith('.cpp') - - diff --git a/persper/graphs/detect_change.py b/persper/graphs/detect_change.py deleted file mode 100644 index 14826f65443..00000000000 --- a/persper/graphs/detect_change.py +++ /dev/null @@ -1,70 +0,0 @@ -def get_intersected_length(a, b): - """ - >>> get_intersected_length([1, 9], [2, 8]) - 7 - >>> get_intersected_length([2, 8], [1, 9]) - 7 - >>> get_intersected_length([1, 4], [1, 5]) - 4 - >>> get_intersected_length([2, 10], [4, 11]) - 7 - """ - start = a[0] if a[0] >= b[0] else b[0] - end = a[1] if a[1] <= b[1] else b[1] - if start > end: - return 0 - else: - return end - start + 1 - - -def get_changed_functions(func_names, func_ranges, additions, deletions, - separate=False): - """ - Args: - func_names: A list of function names, - usually extracted from old src file, - so new functions aren't included. - func_ranges: A sorted list of function ranges - in the same order of func_names. - additions: A list of pair of integers, - deletions: A list of pair of integers, - separate: A boolean flag, if set to True, additions and deletions are - reported separately. - - Returns: - A dictionary where keys are function names and values are - number of lines edited. 
- """ - info = {} - - if (func_names is None or func_ranges is None or - additions is None or deletions is None): - return info - - def update_info(fn, num_lines, key): - """key should be one of 'adds' or 'dels'.""" - if fn in info: - info[fn][key] += num_lines - else: - info[fn] = {'adds': 0, 'dels': 0} - info[fn][key] = num_lines - - add_ptr, del_ptr = 0, 0 - num_adds, num_dels = len(additions), len(deletions) - for fn, fr in zip(func_names, func_ranges): - for i in range(add_ptr, num_adds): - if fr[0] <= additions[i][0] <= fr[1]: - update_info(fn, additions[i][1], 'adds') - add_ptr = i + 1 - - for j in range(del_ptr, num_dels): - inter_length = get_intersected_length(fr, deletions[j]) - if inter_length > 0: - update_info(fn, inter_length, 'dels') - del_ptr = j - - if not separate: - for fn in info: - info[fn] = info[fn]['adds'] + info[fn]['dels'] - - return info diff --git a/persper/graphs/devrank.py b/persper/graphs/devrank.py deleted file mode 100644 index 3977d7c279d..00000000000 --- a/persper/graphs/devrank.py +++ /dev/null @@ -1,58 +0,0 @@ -import numpy as np -from numpy import linalg as LA -from scipy.sparse import coo_matrix - - -def devrank(G, count_self=False, alpha=0.85, epsilon=1e-5, max_iters=300): - """Memory efficient DevRank using scipy.sparse""" - ni = {} - for i, u in enumerate(G): - ni[u] = i - - def sizeof(u): - return G.node[u]['num_lines'] - - num_nodes = len(G.nodes()) - row, col, data = [], [], [] - for u in G: - num_out_edges = len(G[u]) - if num_out_edges > 0: - total_out_sizes = 0 - for v in G[u]: - total_out_sizes += sizeof(v) - if count_self: - total_out_sizes += sizeof(u) - row.append(ni[u]) - col.append(ni[u]) - data.append(sizeof(u) / total_out_sizes) - for v in G[u]: - row.append(ni[v]) - col.append(ni[u]) - data.append(sizeof(v) / total_out_sizes) - - P = coo_matrix((data, (row, col)), shape=(num_nodes, num_nodes)).tocsr() - - universe_size = 0 - for u in G: - universe_size += sizeof(u) - - p = np.empty(num_nodes) - for u in G: - 
p[ni[u]] = sizeof(u) / universe_size - - v = np.ones(num_nodes) / num_nodes - - for i in range(max_iters): - new_v = alpha * P.dot(v) - gamma = LA.norm(v, 1) - LA.norm(new_v, 1) - new_v += gamma * p - delta = LA.norm(new_v - v, 1) - if delta < epsilon: - break - v = new_v - - pr = {} - for u in G: - pr[u] = v[ni[u]] - - return pr diff --git a/persper/graphs/git_tools.py b/persper/graphs/git_tools.py deleted file mode 100644 index 716f80e3156..00000000000 --- a/persper/graphs/git_tools.py +++ /dev/null @@ -1,32 +0,0 @@ -from git.exc import InvalidGitRepositoryError, NoSuchPathError -from git import Repo -import sys - -EMPTY_TREE_SHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904' - - -def _diff_with_first_parent(commit): - if len(commit.parents) == 0: - prev_commit = EMPTY_TREE_SHA - else: - prev_commit = commit.parents[0] - # commit.diff automatically detect renames - return commit.diff(prev_commit, - create_patch=True, R=True, indent_heuristic=True) - - -def initialize_repo(repo_path): - try: - repo = Repo(repo_path) - except InvalidGitRepositoryError as e: - print("Invalid Git Repository!") - sys.exit(-1) - except NoSuchPathError as e: - print("No such path error!") - sys.exit(-1) - return repo - - -def get_contents(repo, commit, path): - """Get contents of a path within a specific commit""" - return repo.git.show('{}:{}'.format(commit.hexsha, path)) diff --git a/persper/graphs/graph_server.py b/persper/graphs/graph_server.py deleted file mode 100644 index 6acbaf26aed..00000000000 --- a/persper/graphs/graph_server.py +++ /dev/null @@ -1,81 +0,0 @@ -from abc import ABC -from abc import abstractmethod - -JS_FILENAME_REGEXES = [ - r'.+\.js$', - r'^(?!dist/).+', - r'^(?!test(s)?/).+', - r'^(?!spec/).+', - r'^(?!build/).+', - r'^(?!bin/).+', - r'^(?!doc(s)?/).+' -] - -# todo(hezheng) consider moving these regexes to their corresponding language file -C_FILENAME_REGEXES = [ - r'.+\.(h|c)$' -] - -# 
http://gcc.gnu.org/onlinedocs/gcc-4.4.1/gcc/Overall-Options.html#index-file-name-suffix-71 -CPP_FILENAME_REGEXES = { - r'.+\.(c|cc|cxx|cpp|CPP|c\+\+|C|hh|hpp|Hpp|h\+\+|H)$' -} - - -class GraphServer(ABC): - - @abstractmethod - def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): - """ - Update the graph with a single-file patch - :param old_filename: the path to a file that the commit modifies - :param old_src: the source code of the file before the commit - :param new_filename: the path to the file after the commit - :param new_src: the source code of the file after the commit - :param patch: the raw patch generated by GitPython diff - :return: a function-ID-to-changed-LOCs mapping and an old-function-ID-to-new-function-ID mapping - """ - pass - - @abstractmethod - def parse(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): - """ - Parse a single-file patch without updating the graph - :param old_filename: the path to a file that the commit modifies - :param old_src: the source code of the file before the commit - :param new_filename: the path to the file after the commit - :param new_src: the source code of the file after the commit - :param patch: the raw patch generated by GitPython diff - :return: a function-ID-to-changed-LOCs mapping and an old-function-ID-to-new-function-ID mapping - """ - pass - - @abstractmethod - def get_graph(self): - """ - Retrieve the graph - :return: A NetworkX graph object - """ - pass - - @abstractmethod - def reset_graph(self): - """Reset the graph discarding all data""" - pass - - @abstractmethod - def filter_file(self, filename): - """ - Check if the file should be filtered out - :param filename: the path of the file to check - :return: True if the file should be selected; False otherwise. 
- """ - pass - - @abstractmethod - def config(self, param: dict): - """ - One-time configuration of the server for following calls - :param param: key-value pairs of configuration - """ - pass diff --git a/persper/graphs/graph_server_http.py b/persper/graphs/graph_server_http.py deleted file mode 100644 index 5bea5ff0f39..00000000000 --- a/persper/graphs/graph_server_http.py +++ /dev/null @@ -1,54 +0,0 @@ -from networkx.readwrite import json_graph -from persper.graphs.graph_server import GraphServer -import re -import requests -import urllib.parse - - -class GraphServerHttp(GraphServer): - def __init__(self, server_addr, filename_regex_strs): - self.server_addr = server_addr - self.filename_regexes = [re.compile(regex_str) for regex_str in filename_regex_strs] - self.config_param = dict() - - def update_graph(self, old_filename, old_src, new_filename, new_src, patch): - payload = {'oldFname': old_filename, - 'oldSrc': old_src, - 'newFname': new_filename, - 'newSrc': new_src, - 'patch': patch.decode('utf-8', 'replace'), - 'config': self.config_param} - - update_url = urllib.parse.urljoin(self.server_addr, '/update') - r = requests.post(update_url, json=payload).json() - return r['idToLines'], r['idMap'] - - def parse(self, old_filename, old_src, new_filename, new_src, patch): - payload = {'oldFname': old_filename, - 'oldSrc': old_src, - 'newFname': new_filename, - 'newSrc': new_src, - 'patch': patch.decode('utf-8', 'replace'), - 'config': self.config_param} - - stats_url = urllib.parse.urljoin(self.server_addr, '/stats') - r = requests.get(stats_url, json=payload).json() - return r['idToLines'], r['idMap'] - - def get_graph(self): - graph_url = self.server_addr + '/callgraph' - r = requests.get(graph_url) - return json_graph.node_link_graph(r.json()) - - def reset_graph(self): - reset_url = urllib.parse.urljoin(self.server_addr, '/reset') - requests.post(reset_url) - - def filter_file(self, filename): - for regex in self.filename_regexes: - if not 
regex.match(filename): - return False - return True - - def config(self, param): - self.config_param = param diff --git a/persper/graphs/inverse_diff.py b/persper/graphs/inverse_diff.py deleted file mode 100644 index 24274c1c6eb..00000000000 --- a/persper/graphs/inverse_diff.py +++ /dev/null @@ -1,41 +0,0 @@ - -def inverse_diff(adds, dels): - """ - >>> adds = [[11, 1], [32, 1]] - >>> dels = [[11, 11], [31, 32]] - >>> _inverse_diff_result(adds, dels) - ([[10, 1], [30, 2]], [[11, 11], [31, 31]]) - """ - diff = 0 - add_ptr, del_ptr = 0, 0 - num_adds, num_dels = len(adds), len(dels) - inv_adds, inv_dels = [], [] - - def _handle_a(a): - nonlocal diff - inv_dels.append([diff + a[0] + 1, diff + a[0] + a[1]]) - diff += a[1] - - def _handle_d(d): - nonlocal diff - inv_adds.append([diff + d[0] - 1, d[1] - d[0] + 1]) - diff -= (d[1] - d[0] + 1) - - while add_ptr < num_adds or del_ptr < num_dels: - if add_ptr < num_adds and del_ptr < num_dels: - if adds[add_ptr][0] < dels[del_ptr][0]: - _handle_a(adds[add_ptr]) - add_ptr += 1 - else: - _handle_d(dels[del_ptr]) - del_ptr += 1 - elif add_ptr < num_adds and del_ptr >= num_dels: - # we have finished dels - _handle_a(adds[add_ptr]) - add_ptr += 1 - else: - # we have finished adds - _handle_d(dels[del_ptr]) - del_ptr += 1 - - return inv_adds, inv_dels diff --git a/persper/graphs/iterator.py b/persper/graphs/iterator.py deleted file mode 100644 index 2a93ba09940..00000000000 --- a/persper/graphs/iterator.py +++ /dev/null @@ -1,186 +0,0 @@ -import time -from persper.graphs.git_tools import initialize_repo -from collections import deque - - -class RepoIterator(): - - def __init__(self, repo_path): - self.repo_path = repo_path - self.repo = initialize_repo(repo_path) - self.visited = set() - self.last_processed_commit = None - - def iter(self, rev=None, - from_beginning=False, - num_commits=None, - continue_iter=False, - end_commit_sha=None, - into_branches=False, - max_branch_length=100, - min_branch_date=None): - """ - This function 
supports four ways of specifying the - range of commits to return: - - Method 1: rev - Pass `rev` parameter and set both - `from_beginning` and `continue_iter` to False. - `rev` is the revision specifier which follows - an extended SHA-1 syntax. Please refer to git-rev-parse - for viable options. `rev' should only include commits - on the master branch. - - Method 2: from_beginning & num_commits (optional) - Set `from_beginning` to True and - pass `num_commits` parameter. Using this - method, the function will start from the - very first commit on the master branch and - process the following `num_commits` commits - (also on the master branch). - - Method 3: continue_iter & num_commits - Set `continue_iter` to True and pass - `num_commits` parameter. Using this method, the - function will resume processing from succeeding commit of - `self.last_processed_commit` for `num_commits` commits. - - Method 4: continue_iter & end_commit_sha - Set `continue_iter` to True and pass - `end_commit_sha` parameter. The range of continued processing - will be `self.last_processed_commit.hexsha..end_commit_sha`. - - Args: - rev: A string, see above. - num_commits: An int, see above. - from_beginning: A boolean flag, see above. - continue_iter: A boolean flag, see above. - end_commit_sha: A string, see above. - into_branches: A boolean flag. - max_branch_length: An int, the maximum number of commits - to trace back before abortion. - min_branch_date: A python time object, stop backtracing if - a commit is authored before this time. - """ - commits = [] - branch_commits = [] - - if not continue_iter: - self.reset_state() - - # Method 2 - if from_beginning: - commits = list(self.repo.iter_commits(first_parent=True)) - if num_commits is not None: - commits = commits[-num_commits:] - - elif continue_iter: - if not self.last_processed_commit: - print("No history exists yet, terminated.") - return [], [] - - # Method 4 - if end_commit_sha: - rev = self.last_processed_commit.hexsha + '..' 
+ end_commit_sha - commits = list(self.repo.iter_commits( - rev, first_parent=True)) - # Method 3 - elif num_commits: - # some project's main branch might not be master, thus use HEAD - rev = self.last_processed_commit.hexsha + '..HEAD' - commits = list(self.repo.iter_commits( - rev, first_parent=True))[-num_commits:] - else: - print("Both end_commit_sha and num_commits are None.") - return [], [] - - else: - # Method 1 - commits = list(self.repo.iter_commits(rev, first_parent=True)) - - # set self.last_processed_commit - if len(commits) > 0: - self.last_processed_commit = commits[0] - else: - print("The range specified is empty, terminated.") - return [], [] - - for commit in reversed(commits): - self.visited.add(commit.hexsha) - - if into_branches: - # find all merge commits - start_points = deque() - for commit in reversed(commits): - if len(commit.parents) > 1: - for pc in commit.parents[1:]: - start_points.append(pc) - - self.branch_lengths = [] - - while len(start_points) > 0: - cur_commit = start_points.popleft() - branch_length = 0 - - while True: - - # stop tracing back along this branch - # if cur_commit has been visited - if cur_commit.hexsha in self.visited: - break - - # stop if we have reached time boundary - authored_date = time.gmtime(cur_commit.authored_date) - if min_branch_date and min_branch_date > authored_date: - break - - # stop if we have reached max_branch_length - if branch_length >= max_branch_length: - print("WARNING: MAX_BRANCH_LENGTH reached.") - break - - self.visited.add(cur_commit.hexsha) - branch_commits.append(cur_commit) - branch_length += 1 - - # stop if we have reached the very first commit - if len(cur_commit.parents) == 0: - break - - # add to queue if cur_commit is a merge commit - if len(cur_commit.parents) > 1: - for pc in cur_commit.parents[1:]: - start_points.append(pc) - - # get next commit - cur_commit = cur_commit.parents[0] - - if branch_length > 0: - self.branch_lengths.append(branch_length) - - return commits, 
branch_commits - - def reset_state(self): - self.visited = set() - self.last_processed_commit = None - - def __getstate__(self): - state = {} - state['repo_path'] = self.repo_path - state['visited'] = self.visited - # Avoid directly pickle Commit object - if self.last_processed_commit is None: - state['last_processed_sha'] = None - else: - state['last_processed_sha'] = self.last_processed_commit.hexsha - return state - - def __setstate__(self, state): - self.repo_path = state['repo_path'] - self.visited = state['visited'] - self.repo = initialize_repo(state['repo_path']) - if state['last_processed_sha'] is None: - self.last_processed_commit = None - else: - self.last_processed_commit = self.repo.commit( - state['last_processed_sha']) diff --git a/persper/graphs/pagerank.py b/persper/graphs/pagerank.py deleted file mode 100644 index 186ea7aa9f9..00000000000 --- a/persper/graphs/pagerank.py +++ /dev/null @@ -1,43 +0,0 @@ -from numpy import linalg as LA -import numpy as np -from scipy.sparse import coo_matrix - - -def pagerank(G, alpha=0.85, epsilon=1e-5, max_iters=300): - """Memory efficient PageRank using scipy.sparse - This function implements Algo 1. 
in "A Survey on PageRank Computing" - """ - ni = {} - for i, u in enumerate(G): - ni[u] = i - - num_nodes = len(G.nodes()) - - row, col, data = [], [], [] - for u in G: - num_out_edges = len(G[u]) - if num_out_edges > 0: - w = 1 / num_out_edges - for v in G[u]: - row.append(ni[v]) - col.append(ni[u]) - data.append(w) - - P = coo_matrix((data, (row, col)), shape=(num_nodes, num_nodes)).tocsr() - p = np.ones(num_nodes) / num_nodes - v = np.ones(num_nodes) / num_nodes - - for i in range(max_iters): - new_v = alpha * P.dot(v) - gamma = LA.norm(v, 1) - LA.norm(new_v, 1) - new_v += gamma * p - delta = LA.norm(new_v - v, 1) - if delta < epsilon: - break - v = new_v - - pr = {} - for u in G: - pr[u] = v[ni[u]] - - return pr diff --git a/persper/graphs/parse_patch.py b/persper/graphs/parse_patch.py deleted file mode 100644 index 8cb15af3f29..00000000000 --- a/persper/graphs/parse_patch.py +++ /dev/null @@ -1,45 +0,0 @@ -import re - -example_patch = \ -r"""--- Portfile.orig 2011-07-25 18:52:12.000000000 -0700 -+++ Portfile 2011-07-25 18:53:35.000000000 -0700 -@@ -2,7 +2,7 @@ - PortSystem 1.0 - name foo - --version 1.3.0 -+version 1.4.0 - categories net - maintainers nomaintainer - description A network monitoring daemon. 
-@@ -13,9 +13,9 @@ - - homepage http://rsug.itd.umich.edu/software/${name} - - master_sites ${homepage}/files/ --checksums rmd160 f0953b21cdb5eb327e40d4b215110b71 -+checksums rmd160 01532e67a596bfff6a54aa36face26ae - extract.suffix .tgz - platforms darwin""" - -def parse_patch(text): - """Parse the content of a patch string and return a list of modified intervals - - >>> parse_patch(example_patch) - [[2, 8], [13, 21]] - """ - re_chunk_header = re.compile("""\@\@\s* - \-(?P\d+),(?P\d+)\s* - \+(?P\d+),(?P\d+)\s* - \@\@ - """, re.VERBOSE) - modified_intervals = [] - for m in re_chunk_header.finditer(text): - old_start_line, old_num_lines, _, _ = m.groups() - modified_intervals.append([int(old_start_line), int(old_start_line) + int(old_num_lines) - 1]) - - return modified_intervals - -if __name__ == "__main__": - import doctest - doctest.testmod() \ No newline at end of file diff --git a/persper/graphs/patch_parser.py b/persper/graphs/patch_parser.py deleted file mode 100644 index b021741b584..00000000000 --- a/persper/graphs/patch_parser.py +++ /dev/null @@ -1,87 +0,0 @@ -import re - - -class PatchParser(): - - def __init__(self): - self.re_chunk_header = re.compile("""\@\@\s* - \-(?P\d+)(,(?P\d+))?\s* - \+(?P\d+)(,(?P\d+))?\s* - \@\@ - """, re.VERBOSE) - - def clean(self): - self.additions = [] - self.deletions = [] - self.in_add, self.in_del = False, False - self.in_chunk = False - - self.add_start, self.del_start = None, None - self.add_num_lines = None - self.cur = None - - def start_add(self): - self.in_add = True - self.add_start = self.cur - 1 - self.add_num_lines = 1 - - def start_del(self): - self.in_del = True - self.del_start = self.cur - - def finish_add(self): - self.in_add = False - self.additions.append([self.add_start, self.add_num_lines]) - - def finish_del(self): - self.in_del = False - self.deletions.append([self.del_start, self.cur - 1]) - - def parse(self, text): - self.clean() - for line in text.split('\n'): - line = line.strip() - if not 
self.in_chunk: - if line.startswith('@@'): - self.in_chunk = True - else: - continue - - if line.startswith('@@'): - m = self.re_chunk_header.search(line) - self.cur = max(int(m.groups()[0]), 1) - elif line.startswith('-'): - # print("in minus") - if self.in_add: - self.finish_add() - self.start_del() - elif self.in_del: - pass - else: - self.start_del() - self.cur += 1 # always increment in minus - elif line.startswith('+'): - # print("in plus") - if self.in_add: - self.add_num_lines += 1 - elif self.in_del: - self.finish_del() - self.start_add() - else: - self.start_add() - else: - # print("in blank") - if self.in_add: - self.finish_add() - elif self.in_del: - self.finish_del() - else: - pass - self.cur += 1 # always increment in blank - - if self.in_add: - self.finish_add() - elif self.in_del: - self.finish_del() - - return self.additions, self.deletions diff --git a/persper/graphs/processor.py b/persper/graphs/processor.py deleted file mode 100644 index 69bc615c85b..00000000000 --- a/persper/graphs/processor.py +++ /dev/null @@ -1,375 +0,0 @@ -import os -import time -import pickle -from persper.graphs.git_tools import initialize_repo -from collections import deque -import functools -print = functools.partial(print, flush=True) - -EMPTY_TREE_SHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904' - - -def _diff_with_first_parent(commit): - if len(commit.parents) == 0: - prev_commit = EMPTY_TREE_SHA - else: - prev_commit = commit.parents[0] - # commit.diff automatically detect renames - return commit.diff(prev_commit, - create_patch=True, R=True, indent_heuristic=True) - - -def _fill_change_type(diff_index): - for diff in diff_index: - if diff.new_file: - diff.change_type = 'A' - elif diff.deleted_file: - diff.change_type = 'D' - elif diff.renamed: - diff.change_type = 'R' - elif (diff.a_blob and diff.b_blob and - (diff.a_blob != diff.b_blob)): - diff.change_type = 'M' - else: - diff.change_type = 'U' - - -def _print_diff_index(diff_index): - print(" 
".join([diff.change_type for diff in diff_index])) - - -def _subject(msg): - return msg.split('\n', 1)[0].lstrip().rstrip() - - -class Processor(): - - def __init__(self, repo_path): - self.repo_path = repo_path - self.repo = initialize_repo(repo_path) - self.visited = set() - self.last_processed_commit = None - - def process(self, rev=None, - from_beginning=False, num_commits=None, - from_last_processed=False, end_commit_sha=None, - into_branches=False, - max_branch_length=100, - min_branch_date=None, - checkpoint_interval=100, - skip_work=False, - verbose=True): - """ - This function supports four ways of specifying the - range of commits to process: - - Method 1: rev - Pass `rev` parameter and set both - `from_beginning` and `from_last_processed` to False. - `rev` is the revision specifier which follows - an extended SHA-1 syntax. Please refer to git-rev-parse - for viable options. `rev' should only include commits - on the master branch. - - Method 2: from_beginning & num_commits (optional) - Set `from_beginning` to True and - pass `num_commits` parameter. Using this - method, the function will start from the - very first commit on the master branch and - process the following `num_commits` commits - (also on the master branch). - - Method 3: from_last_processed & num_commits - Set `from_last_processed` to True and pass - `num_commits` parameter. Using this method, the - function will resume processing from succeeding commit of - `self.last_processed_commit` for `num_commits` commits. - - Method 4: from_last_processed & end_commit_sha - Set `from_last_processed` to True and pass - `end_commit_sha` parameter. The range of continued processing - will be `self.last_processed_commit.hexsha..end_commit_sha`. - - Args: - rev: A string, see above. - num_commits: An int, see above. - from_beginning: A boolean flag, see above. - from_last_processed: A boolean flag, see above. - end_commit_sha: A string, see above. 
- into_branches: A boolean flag, if True, the process function - will operate in two phases. - - In the first phase, a call commit graph is contructed - by traversing the specified range of commits on the master - branch. Merge commits are detected and recorded if the - start commit (on master) and end/merge commit of the - corresponding branch are both within the range of - traversal. Those recorded merge commits do not - get any credits (thus they are not present in - self.history data structure). - - In the second phase, it traverses all the branches detected - in the first phase and assign them due credits. - - max_branch_length: An int, the maximum number of commits - to trace back before abortion. - min_branch_date: A python time object, stop backtracing if - a commit is authored before this time. - checkpoint_interval: An int. - """ - if not from_last_processed: - self._reset_state() - self.merge_commits = deque() - - # Method 2 - if from_beginning: - if num_commits is None: - num_commits = 0 - self.commits = list( - self.repo.iter_commits(first_parent=True))[-num_commits:] - - elif from_last_processed: - if not self.last_processed_commit: - print("No history exists yet, terminated.") - return - - # Method 4 - if end_commit_sha: - rev = self.last_processed_commit.hexsha + '..' 
+ end_commit_sha - self.commits = list(self.repo.iter_commits( - rev, first_parent=True)) - # Method 3 - elif num_commits: - rev = self.last_processed_commit.hexsha + '..master' - self.commits = list(self.repo.iter_commits( - rev, first_parent=True))[-num_commits:] - else: - print("Both end_commit_sha and num_commits are None.") - return - - else: - # Method 1 - self.commits = list(self.repo.iter_commits(rev, first_parent=True)) - - if len(self.commits) > 0: - self.last_processed_commit = self.commits[0] - else: - print("The range specified is empty, terminated.") - return - - counter = 1 - start = time.time() - - # 1st phase - for commit in reversed(self.commits): - sha = commit.hexsha - self.visited.add(sha) - self._start_process_commit(commit) - - if verbose: - print('------ No.{} {} {} {} ------'.format( - counter, sha, _subject(commit.message), - time.strftime( - "%b %d %Y", time.gmtime(commit.authored_date) - )) - ) - else: - print('------ No.{} {} ------'.format(counter, sha)) - if counter % 100 == 0: - print('------ Used time: {} ------'.format( - time.time() - start)) - - if counter % checkpoint_interval == 0: - repo_name = os.path.basename(self.repo_path.rstrip('/')) - self.save(repo_name + '-1st-' + str(counter) + '.pickle') - - if into_branches: - is_merge_commit = len(commit.parents) > 1 - if is_merge_commit: - self.merge_commits.append(commit) - """ - is_merge_commit = self._detect_branch( - commit, max_branch_length, min_branch_date) - """ - else: - is_merge_commit = False - - if not skip_work: - # generate diff_index by diff commit with its first parent - diff_index = _diff_with_first_parent(commit) - - # figure out the change type of each entry in diff_index - _fill_change_type(diff_index) - - if verbose: - _print_diff_index(diff_index) - - for diff in diff_index: - if diff.change_type == 'U': - print('Unknown change type encountered.') - continue - - if diff.change_type == 'A': - self.on_add(diff, commit, is_merge_commit) - - elif diff.change_type 
== 'D': - self.on_delete(diff, commit, is_merge_commit) - - elif diff.change_type == 'R': - self.on_rename(diff, commit, is_merge_commit) - - else: - self.on_modify(diff, commit, is_merge_commit) - - counter += 1 - - # 2nd phase - if into_branches: - - commit_cnt = 1 - branch_cnt = 1 - start = time.time() - - print('\n------- 2nd phase -------\n') - - while len(self.merge_commits) > 0: - mc = self.merge_commits.popleft() - cur_commit = mc.parents[1] - branch_length = 0 - valid_branch = False - - while True: - - # stop tracing back along this branch - # if cur_commit has been visited - if cur_commit.hexsha in self.visited: - break - - # stop if we have reached time boundary - authored_date = time.gmtime(cur_commit.authored_date) - if min_branch_date and min_branch_date > authored_date: - break - - # stop if we have reached max_branch_length - if branch_length >= max_branch_length: - break - - # stop if we have reached the very first commit - if len(cur_commit.parents) == 0: - break - - # will process at least one commit for this branch - valid_branch = True - - # process this commit - if verbose: - print('------ Commit No.{} '.format(commit_cnt), - 'Branch No.{} {} {} {} ------'.format( - branch_cnt, - cur_commit.hexsha, - _subject(cur_commit.message), - time.strftime( - "%b %d %Y", - time.gmtime(cur_commit.authored_date) - ) - ) - ) - else: - print('------ Commit No.{} '.format(commit_cnt), - 'Branch No.{} {}------'.format( - branch_cnt, cur_commit.hexsha)) - - if commit_cnt % 100 == 0: - print('------ Used time: {} ------'.format( - time.time() - start)) - - if commit_cnt % checkpoint_interval == 0: - repo_name = os.path.basename( - self.repo_path.rstrip('/')) - self.save( - repo_name + '-2nd-' + str(counter) + '.pickle') - - self.visited.add(cur_commit.hexsha) - # add to queue if prev_commit is a merge commit - if len(cur_commit.parents) == 2: - self.merge_commits.append(cur_commit) - - if not skip_work: - self._start_process_commit(cur_commit) - diff_index = 
_diff_with_first_parent(cur_commit) - _fill_change_type(diff_index) - for diff in diff_index: - if diff.change_type == 'U': - print('Unknown change type encountered.') - continue - if diff.change_type == 'A': - self.on_add2(diff, cur_commit) - elif diff.change_type == 'D': - self.on_delete2(diff, cur_commit) - elif diff.change_type == 'R': - self.on_rename2(diff, cur_commit) - else: - self.on_modify2(diff, cur_commit) - - # get next commit - prev_commit = cur_commit.parents[0] - - cur_commit = prev_commit - branch_length += 1 - commit_cnt += 1 - - if valid_branch: - branch_cnt += 1 - - repo_name = os.path.basename(self.repo_path.rstrip('/')) - self.save(repo_name + '-finished.pickle') - - def _reset_state(self): - self.visited = set() - self.last_processed_commit = None - - def _start_process_commit(self, commit): - pass - - def set_repo_path(self, repo_path): - self.repo_path = repo_path - self.repo = initialize_repo(repo_path) - self.last_processed_commit = self.repo.commit(self.last_sha) - - def on_add(self, diff, commit, is_merge_commit): - return 0 - - def on_delete(self, diff, commit, is_merge_commit): - return 0 - - def on_rename(self, diff, commit, is_merge_commit): - return 0 - - def on_modify(self, diff, commit, is_merge_commit): - return 0 - - def on_add2(self, diff, commit): - return 0 - - def on_delete2(self, diff, commit): - return 0 - - def on_rename2(self, diff, commit): - return 0 - - def on_modify2(self, diff, commit): - return 0 - - def __getstate__(self): - state = { - 'visited': self.visited, - 'last_sha': self.last_processed_commit.hexsha - } - return state - - def __setstate__(self, state): - self.__dict__.update(state) - - def save(self, fname): - with open(fname, 'wb+') as f: - pickle.dump(self, f) diff --git a/persper/graphs/ruby_tools.py b/persper/graphs/ruby_tools.py deleted file mode 100644 index ef988a5161a..00000000000 --- a/persper/graphs/ruby_tools.py +++ /dev/null @@ -1,53 +0,0 @@ -import re -import os.path -import subprocess -from 
git import Repo -import networkx as nx - -from persper.graphs.write_graph_to_dot import write_G_to_dot_with_pr - - -def underscore_to_camelcase(value): - def camelcase(): - while True: - yield str.capitalize - - c = camelcase() - return "".join(next(c)(x) if x else '_' for x in value.split("_")) - - -def get_func_ranges_ruby(src, fname): - - def get_prefix(fname): - return fname - - fname = os.path.basename(fname).split('.')[0] - prefix = underscore_to_camelcase(get_prefix(fname)) - - re_def = re.compile("^\s*def\s+(?Pself\.)?(?P\w+\??)\s*\(?.*\)?$") - func_ids = [] - func_ranges = [] - ptr = -1 - num_lines = 0 - for lineno, line in enumerate(src.split('\n'), 1): - num_lines += 1 - m = re_def.search(line) - if m: - d = m.groupdict() - if d['class_method'] or fname.endswith('_helper.rb'): - op = "::" - else: - op = "#" - func_ids.append(prefix + op + d['func_name']) - - if ptr != -1: - func_ranges.append([ptr, lineno - 1]) - ptr = lineno - if ptr != -1: - func_ranges.append([ptr, num_lines]) - - return func_ids, func_ranges - - -def fname_filter_ruby(fname): - return fname.endswith('.rb') diff --git a/persper/graphs/srcml.py b/persper/graphs/srcml.py deleted file mode 100755 index 7b90d9090c5..00000000000 --- a/persper/graphs/srcml.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import shutil -import os -import glob -import subprocess -import tempfile -from lxml import etree - - -def copy_dir(src, dst, *, follow_sym=True): - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - if os.path.isdir(src): - shutil.copyfile(src, dst, follow_symlinks=follow_sym) - shutil.copystat(src, dst, follow_symlinks=follow_sym) - return dst - - -def transform_dir(input_dir, output_dir, extensions=('.c', '.h')): - """Run srcML recursively under a directory - - First copy directory structure from input_dir to output_dir, - then for every source file that ends with ext in extentions, - run srcML and output to corresponding directory 
under output_dir. - """ - # copy directory structure - input_dir = os.path.expanduser(input_dir) - output_dir = os.path.expanduser(output_dir) - if os.path.isdir(output_dir): - shutil.rmtree(output_dir) - shutil.copytree(input_dir, output_dir, copy_function=copy_dir) - - print("Transforming source code to xml...") - counter = 0 - for ext in extensions: - for fname in glob.iglob(input_dir + '/**/*' + ext, recursive=True): - if counter % 100 == 0: - print('Processed {}'.format(counter)) - # linux-kernel/arch/alpha/boot/bootp.c -> arch/alpha/boot/bootp.c - pre = os.path.commonprefix((input_dir, fname)) - rel = os.path.relpath(fname, pre) - output_path = os.path.join(output_dir, rel) + ".xml" - - cmd = 'srcml {} --position -o {}'.format(fname, output_path) - subprocess.call(cmd, shell=True) - - counter += 1 - print("Tranformation completed, {} processed.".format(counter)) - - -def transform_src_to_tree(source_code, ext='.c'): - root = None - try: - f = tempfile.NamedTemporaryFile(mode='wb+', delete=False) - f.write(source_code.encode('utf-8', 'replace')) - f.close() - except UnicodeEncodeError as e: - print("UnicodeEncodeError in transform_src_to_tree!") - if not f.closed: - f.close() - os.remove(f.name) - return None - - # rename so that srcml can open it - new_fname = f.name + ext - os.rename(f.name, new_fname) - xml_path = f.name + ".xml" - cmd = 'srcml {} --position -o {}'.format(new_fname, xml_path) - subprocess.call(cmd, shell=True) - try: - root = etree.parse(xml_path).getroot() - except: - print("Unable to parse xml file!") - finally: - if not f.closed: - f.close() - os.remove(new_fname) - if os.path.exists(xml_path): - os.remove(xml_path) - - return root - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('SOURCE', help='source dir', type=str) - parser.add_argument('OUTPUT', help='output dir', type=str) - args = parser.parse_args() - transform_dir(args.SOURCE, args.OUTPUT) - -if __name__ == '__main__': - main() diff --git 
a/persper/graphs/write_graph_to_dot.py b/persper/graphs/write_graph_to_dot.py deleted file mode 100644 index 1883f66f6a9..00000000000 --- a/persper/graphs/write_graph_to_dot.py +++ /dev/null @@ -1,31 +0,0 @@ -import matplotlib as mpl -import matplotlib.cm as cm - - -def map_to_color(x, m): - color_list = [int(255 * c) for c in m.to_rgba(x)] - return '#%02x%02x%02x%02x' % tuple(color_list) - - -def write_G_to_dot_with_pr(G, pr, fname, edge_attrib=None, header_lines=None): - norm = mpl.colors.Normalize(vmin=min(pr.values()), vmax=max(pr.values())) - cmap = cm.Blues - m = cm.ScalarMappable(norm, cmap=cmap) - with open(fname, 'w+') as f: - f.write('digraph graphname {\n') - if header_lines: - for line in header_lines: - f.write(line) - for n in G.nodes(data=False): - color_str = map_to_color(pr[n], m) - f.write( - '\"%s\" [style=filled fillcolor="%s" tooltip=\"%f\"];\n' % - (n, color_str, pr[n])) - for e in G.edges_iter(data=True): - if edge_attrib: - f.write('\"{}\" -> \"{}\" [ label=\"{}\"];\n'.format( - e[0], e[1], ' '.join(e[2][edge_attrib][:10]))) - else: - f.write('\"{}\" -> \"{}\";\n'.format(e[0], e[1])) - - f.write('}') diff --git a/test/test_graphs/example.cc b/test/test_graphs/example.cc deleted file mode 100644 index 6ab29590f3d..00000000000 --- a/test/test_graphs/example.cc +++ /dev/null @@ -1,100 +0,0 @@ -// migration_controller.cc -// Copyright (c) 2014 Jinglei Ren - -#include "migration_controller.h" - -using namespace std; - -void MigrationController::InputBlocks( - const vector& blocks) { - assert(nvm_pages_.empty()); - for (vector::const_iterator it = blocks.begin(); - it != blocks.end(); ++it) { - if (it->state == ATTEntry::CLEAN || it->state == ATTEntry::FREE) { - assert(it->epoch_writes == 0); - continue; - } - uint64_t block_addr = it->phy_tag << block_bits_; - NVMPage& p = nvm_pages_[PageAlign(block_addr)]; - p.epoch_reads += it->epoch_reads; - p.epoch_writes += it->epoch_writes; - - if (it->epoch_writes) { - p.blocks.insert(block_addr); - 
assert(p.blocks.size() <= page_blocks_); - } - } - dirty_nvm_pages_ += nvm_pages_.size(); -} - -bool MigrationController::ExtractNVMPage(NVMPageStats& stats, - Profiler& profiler) { - if (nvm_heap_.empty()) { - for (unordered_map::iterator it = nvm_pages_.begin(); - it != nvm_pages_.end(); ++it) { - double dr = it->second.blocks.size() / page_blocks_; - double wr = it->second.epoch_writes / page_blocks_; - nvm_heap_.push_back({it->first, dr, wr}); - - total_nvm_writes_ += it->second.epoch_writes; - dirty_nvm_blocks_ += it->second.blocks.size(); - } - make_heap(nvm_heap_.begin(), nvm_heap_.end()); - } - profiler.AddTableOp(); - - if (nvm_heap_.empty()) return false; - - stats = nvm_heap_.front(); - pop_heap(nvm_heap_.begin(), nvm_heap_.end()); - nvm_heap_.pop_back(); - return true; -} - -bool MigrationController::ExtractDRAMPage(DRAMPageStats& stats, - Profiler& profiler) { - if (dram_heap_.empty()) { - int dirts = 0; - for (unordered_map::iterator it = entries_.begin(); - it != entries_.end(); ++it) { - double wr = it->second.epoch_writes / page_blocks_; - dram_heap_.push_back({it->first, it->second.state, wr}); - - total_dram_writes_ += it->second.epoch_writes; - dirts += (it->second.epoch_writes ? 
1 : 0); - } - assert(dirts == dirty_entries_); - dirty_dram_pages_ += dirty_entries_; - - make_heap(dram_heap_.begin(), dram_heap_.end()); - } - profiler.AddTableOp(); - - if (dram_heap_.empty()) return false; - - stats = dram_heap_.front(); - pop_heap(dram_heap_.begin(), dram_heap_.end()); - dram_heap_.pop_back(); - return true; -} - -void MigrationController::Clear(Profiler& profiler) { - profiler.AddPageMoveInter(dirty_entries_); // epoch write-backs - for (PTTEntryIterator it = entries_.begin(); it != entries_.end(); ++it) { - it->second.epoch_reads = 0; - it->second.epoch_writes = 0; - if (it->second.state == PTTEntry::DIRTY_DIRECT) { - ShiftState(it->second, PTTEntry::CLEAN_DIRECT, Profiler::Overlap); - --dirty_entries_; - } else if (it->second.state == PTTEntry::DIRTY_STATIC) { - ShiftState(it->second, PTTEntry::CLEAN_STATIC, Profiler::Overlap); - --dirty_entries_; - } - } - profiler.AddTableOp(); - assert(dirty_entries_ == 0); - - nvm_pages_.clear(); - dram_heap_.clear(); - nvm_heap_.clear(); -} \ No newline at end of file diff --git a/test/test_graphs/example.patch b/test/test_graphs/example.patch deleted file mode 100644 index 0f835ec6469..00000000000 --- a/test/test_graphs/example.patch +++ /dev/null @@ -1,111 +0,0 @@ -@@ -5,6 +5,37 @@ - - using namespace std; - - +void MigrationController::FillNVMPageHeap() { - + for (unordered_map::iterator it = nvm_pages_.begin(); - + it != nvm_pages_.end(); ++it) { - + double dr = it->second.blocks.size() / page_blocks_; - + double wr = it->second.epoch_writes / page_blocks_; - + nvm_heap_.push_back({it->first, dr, wr}); - + - + total_nvm_writes_ += it->second.epoch_writes; - + dirty_nvm_blocks_ += it->second.blocks.size(); - + } - + make_heap(nvm_heap_.begin(), nvm_heap_.end()); - + nvm_heap_filled_ = true; - +} - + - +void MigrationController::FillDRAMPageHeap() { - + int dirts = 0; - + for (unordered_map::iterator it = entries_.begin(); - + it != entries_.end(); ++it) { - + double wr = it->second.epoch_writes / 
page_blocks_; - + dram_heap_.push_back({it->first, it->second.state, wr}); - + - + total_dram_writes_ += it->second.epoch_writes; - + dirts += (it->second.epoch_writes ? 1 : 0); - + } - + assert(dirts == dirty_entries_); - + dirty_dram_pages_ += dirty_entries_; - + - + make_heap(dram_heap_.begin(), dram_heap_.end()); - + dram_heap_filled_ = true; - +} - + - void MigrationController::InputBlocks( - const vector& blocks) { - assert(nvm_pages_.empty()); - @@ -25,56 +56,37 @@ void MigrationController::InputBlocks( - } - } - dirty_nvm_pages_ += nvm_pages_.size(); - + - + FillNVMPageHeap(); - + FillDRAMPageHeap(); - } - - bool MigrationController::ExtractNVMPage(NVMPageStats& stats, - Profiler& profiler) { - - if (nvm_heap_.empty()) { - - for (unordered_map::iterator it = nvm_pages_.begin(); - - it != nvm_pages_.end(); ++it) { - - double dr = it->second.blocks.size() / page_blocks_; - - double wr = it->second.epoch_writes / page_blocks_; - - nvm_heap_.push_back({it->first, dr, wr}); - - - - total_nvm_writes_ += it->second.epoch_writes; - - dirty_nvm_blocks_ += it->second.blocks.size(); - - } - - make_heap(nvm_heap_.begin(), nvm_heap_.end()); - - } - - profiler.AddTableOp(); - + assert(nvm_heap_filled_); - - if (nvm_heap_.empty()) return false; - - stats = nvm_heap_.front(); - pop_heap(nvm_heap_.begin(), nvm_heap_.end()); - nvm_heap_.pop_back(); - + - + profiler.AddTableOp(); - return true; - } - - bool MigrationController::ExtractDRAMPage(DRAMPageStats& stats, - Profiler& profiler) { - - if (dram_heap_.empty()) { - - int dirts = 0; - - for (unordered_map::iterator it = entries_.begin(); - - it != entries_.end(); ++it) { - - double wr = it->second.epoch_writes / page_blocks_; - - dram_heap_.push_back({it->first, it->second.state, wr}); - - - - total_dram_writes_ += it->second.epoch_writes; - - dirts += (it->second.epoch_writes ? 
1 : 0); - - } - - assert(dirts == dirty_entries_); - - dirty_dram_pages_ += dirty_entries_; - - - - make_heap(dram_heap_.begin(), dram_heap_.end()); - - } - + assert(dram_heap_filled_); - profiler.AddTableOp(); - - if (dram_heap_.empty()) return false; - - stats = dram_heap_.front(); - pop_heap(dram_heap_.begin(), dram_heap_.end()); - dram_heap_.pop_back(); - + - + profiler.AddTableOp(); - return true; - } - - @@ -97,4 +109,6 @@ void MigrationController::Clear(Profiler& profiler) { - nvm_pages_.clear(); - dram_heap_.clear(); - nvm_heap_.clear(); - + dram_heap_filled_ = false; - + nvm_heap_filled_ = false; - } \ No newline at end of file diff --git a/test/test_graphs/example2.patch b/test/test_graphs/example2.patch deleted file mode 100644 index ed3efe78b7f..00000000000 --- a/test/test_graphs/example2.patch +++ /dev/null @@ -1,8 +0,0 @@ -@@ -1 +1,7 @@ -+/* -+ * linux/lib/errno.c -+ * -+ * (C) 1991 Linus Torvalds -+ */ -+ - int errno; diff --git a/test/test_graphs/example3.patch b/test/test_graphs/example3.patch deleted file mode 100644 index f094bbc71f6..00000000000 --- a/test/test_graphs/example3.patch +++ /dev/null @@ -1,32 +0,0 @@ -@@ -6,12 +6,27 @@ int str_len(char *string) - return count - string; - } - --/* added in A*/ --char* str_append(char* string, char* append) { -+/* str_append is deleted in B */ -+ -+/* added in B */ -+char* str_append_chr(char* string, char append) { - char* newstring = NULL; -- size_t needed = snprintf(NULL, 0, "%s%s", string, append); -+ size_t needed = snprintf(NULL, 0, "%s%c", string, append); - newstring = malloc(needed); -- sprintf(newstring, "%s%s", string, append); -+ sprintf(newstring, "%s%c", string, append); - return newstring; - } - -+/* added in B */ -+int str_equals(char *equal1, char *eqaul2) -+{ -+ while(*equal1==*eqaul2) -+ { -+ if ( *equal1 == '\0' || *eqaul2 == '\0' ){break;} -+ equal1++; -+ eqaul2++; -+ } -+ if(*eqaul1 == '\0' && *eqaul2 == '\0' ){return 0;} -+ else {return -1}; -+} -+ \ No newline at end of file 
diff --git a/test/test_graphs/example4.patch b/test/test_graphs/example4.patch deleted file mode 100644 index 4c72485b6f2..00000000000 --- a/test/test_graphs/example4.patch +++ /dev/null @@ -1,28 +0,0 @@ -@@ -0,0 +1,27 @@ -+/* added in G */ -+char* str_replace(char* search, char* replace, char* subject) { -+ char* newstring = ""; -+ int i = 0; -+ for(i = 0; i < str_len(subject); i++) { -+ if (subject[i] == search[0]) { -+ int e = 0; -+ char* calc = ""; -+ for(e = 0; e < str_len(search); e++) { -+ if(subject[i+e] == search[e]) { -+ calc = str_append_chr(calc, search[e]); -+ } -+ } -+ if (str_equals(search, calc) == 0) { -+ newstring = str_append(newstring, replace); -+ i = i + str_len (search)-1; -+ } -+ else { -+ newstring = str_append_chr(newstring, subject[i]); -+ } -+ } -+ else { -+ newstring = str_append_chr(newstring, subject[i]); -+ } -+ } -+ return newstring; -+} \ No newline at end of file diff --git a/test/test_graphs/test_analyzer.py b/test/test_graphs/test_analyzer.py deleted file mode 100644 index 21f0d35883c..00000000000 --- a/test/test_graphs/test_analyzer.py +++ /dev/null @@ -1,139 +0,0 @@ -import os -import pytest -import pickle -import subprocess -from persper.graphs.c import CGraphServer -from persper.graphs.analyzer import Analyzer -from persper.graphs.iterator import RepoIterator -from persper.util.path import root_path -from persper.graphs.graph_server import C_FILENAME_REGEXES - - -@pytest.fixture(scope='module') -def az(): - # build the repo first if not exists yet - repo_path = os.path.join(root_path, 'repos/test_feature_branch') - script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/test_feature_branch') - if not os.path.isdir(repo_path): - cmd = '{} {}'.format(script_path, test_src_path) - subprocess.call(cmd, shell=True) - - return Analyzer(repo_path, CGraphServer(C_FILENAME_REGEXES)) - - -def assert_graphs_equal(g1, g2): - assert(set(g1.nodes()) == set(g2.nodes())) - 
assert(set(g1.edges()) == set(g2.edges())) - for n in g1: - print(n) - assert(g1.node[n] == g2.node[n]) - - -def assert_analyzer_equal(az1, az2): - assert(az1.history == az2.history) - assert_graphs_equal(az1.graph_server.get_graph(), az2.graph_server.get_graph()) - - -def assert_graph_match_history(az): - # total edits data stored in the graph should match az.history - master_commits, _ = az.ri.iter(from_beginning=True) - master_sha_set = set([c.hexsha for c in master_commits]) - g = az.graph_server.get_graph() - for func in g.nodes(): - print(func) - func_sum = 0 - for sha in az.history: - if sha in master_sha_set and func in az.history[sha]: - func_sum += az.history[sha][func] - if g.node[func]['defined']: - assert(func_sum == g.node[func]['num_lines']) - - -def test_az_basic(az): - az.analyze(from_beginning=True, into_branches=True) - assert_graph_match_history(az) - - history_truth = { - 'K': {'display': 5}, - 'F': {'display': 14, 'count': 12}, - 'E': {'append': 29, 'add': 11}, - 'D': {'str_replace': 26}, - 'C': {'str_append_chr': 34, 'str_equals': 1}, - 'B': {'str_append': 9, 'str_append_chr': 7, 'str_equals': 11}, - 'A': {'str_append': 7, 'str_len': 6}, - - # branch J from commit A, merge back through F - 'J': {'count': 12, 'display': 14}, - - # branch G from commit B, merge back through D - 'G': {'str_equals': 1, 'str_replace': 26}, - - # branch H from commit D, merge back through E - 'I': {'add': 5, 'append': 35, 'insert': 25}, - 'H': {'add': 16, 'append': 12, 'insert': 25}, - } - - for commit in az.ri.repo.iter_commits(): - assert(az.history[commit.hexsha] == - history_truth[commit.message.strip()]) - - edges_truth = [ - ('append', 'free'), - ('display', 'printf'), - ('str_replace', 'str_append_chr'), - ('str_replace', 'str_equals'), - ('str_replace', 'str_len'), - ('str_replace', 'str_append'), - ('str_append_chr', 'str_append_chr'), - ('str_append_chr', 'str_equals'), - ('str_append_chr', 'str_len'), - ('str_append_chr', 'str_append'), - ('add', 
'malloc') - ] - assert(set(az.graph_server.get_graph().edges()) == set(edges_truth)) - - -def test_analyze_interface(az): - # test various ways to invoke process function - az.analyze(from_beginning=True, into_branches=True) - - repo_path = os.path.join(root_path, 'repos/test_feature_branch') - az1 = Analyzer(repo_path, CGraphServer(C_FILENAME_REGEXES)) - # A B - az1.analyze(from_beginning=True, num_commits=2, into_branches=True) - # C D - az1.analyze(continue_iter=True, num_commits=2, into_branches=True) - # E F K - az1.analyze(continue_iter=True, num_commits=3, into_branches=True) - # should see "The range specified is empty, terminated." - az1.analyze(continue_iter=True, num_commits=1, into_branches=True) - assert_analyzer_equal(az1, az) - - az2 = Analyzer(repo_path, CGraphServer(C_FILENAME_REGEXES)) - ri = RepoIterator(repo_path) - commits, _ = ri.iter(from_beginning=True) - assert(len(commits) == 7) - # should see "No history exists yet, terminated." - az2.analyze(continue_iter=True, num_commits=1, into_branches=True) - # A B C - az2.analyze(from_beginning=True, num_commits=3, into_branches=True) - # D E F - az2.analyze(from_beginning=True, - end_commit_sha=commits[5].hexsha, - into_branches=True) - # K - az2.analyze(from_beginning=True, - end_commit_sha=commits[6].hexsha, - into_branches=True) - assert_analyzer_equal(az2, az) - - -def test_save(az): - az.analyze(from_beginning=True, into_branches=True) - filename = "test_save_g.pickle" - az.save(filename) - with open(filename, 'rb') as f: - az1 = pickle.load(f) - os.remove(filename) - assert_analyzer_equal(az, az1) diff --git a/test/test_graphs/test_analyzer_js.py b/test/test_graphs/test_analyzer_js.py deleted file mode 100644 index e5739ddd587..00000000000 --- a/test/test_graphs/test_analyzer_js.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import time -import pytest -import subprocess -from persper.graphs.graph_server import JS_FILENAME_REGEXES -from persper.graphs.graph_server_http import GraphServerHttp 
-from persper.graphs.analyzer import Analyzer -from persper.util.path import root_path - -# Use a port other than the default 3000 in case of collision -server_port = 3002 -server_path = os.path.join(root_path, 'contribs/js-callgraph/src/app.js') - - -@pytest.fixture(scope='module') -def az(): - """ Build the test repo if not already exists - - Args: - repo_path - A string, path to the to-be-built test repo - script_path - A string, path to the repo creator script - test_src_path - A string, path to the dir to be passed to repo creator - """ - repo_path = os.path.join(root_path, 'repos/js_test_repo') - script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/js_test_repo') - server_addr = 'http://localhost:%d' % server_port - - if not os.path.isdir(repo_path): - cmd = '{} {}'.format(script_path, test_src_path) - subprocess.call(cmd, shell=True) - - return Analyzer(repo_path, GraphServerHttp(server_addr, JS_FILENAME_REGEXES)) - - -def assert_graph_match_history(az: Analyzer): - # total edits data stored in the graph should match az.history - g = az.graph_server.get_graph() - for fid in g.nodes(): - print(fid) - total_edits = 0 - for sha in az.history: - if fid in az.history[sha]: - total_edits += az.history[sha][fid] - assert(total_edits == g.node[fid]['num_lines']) - - -def test_az(az: Analyzer): - my_env = os.environ.copy() - my_env["PORT"] = str(server_port) - p = subprocess.Popen(['node', server_path], env=my_env) - - try: - # wait for the server to spin up - time.sleep(1.0) - az.graph_server.reset_graph() - az.analyze() - # assert_graph_match_history(az) - - history_truth = { - 'C': {'main.js:funcB:9:12': 1, - 'main.js:global': 1, - 'main.js:main:7:16': 1}, - 'B': {'main.js:funcB:9:11': 3, - 'main.js:global': 7, - 'main.js:main:7:15': 7}, - 'A': {'main.js:funcA:3:5': 3, - 'main.js:main:7:10': 4, - 'main.js:global': 12} - } - - for commit in az.ri.repo.iter_commits(): - 
assert(az.history[commit.hexsha] == - history_truth[commit.message.strip()]) - - edges_truth = [ - ('main.js:funcB:9:12', 'Native:Window_prototype_print'), - ('main.js:funcB:9:12', 'main.js:funcA:3:5'), - ('main.js:funcA:3:5', 'Native:Window_prototype_print'), - ('main.js:main:7:16', 'main.js:funcB:9:12'), - ('main.js:main:7:16', 'main.js:funcA:3:5'), - ('main.js:global', 'main.js:main:7:16') - ] - assert(set(az.graph_server.get_graph().edges()) == set(edges_truth)) - - finally: - p.terminate() diff --git a/test/test_graphs/test_callcommitgraph.py b/test/test_graphs/test_callcommitgraph.py deleted file mode 100644 index b4bafafc1eb..00000000000 --- a/test/test_graphs/test_callcommitgraph.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -import pytest -import pickle -import subprocess -from persper.graphs.call_commit_graph import CallCommitGraph -from persper.graphs.call_commit_graph import _inverse_diff_result -from persper.util.path import root_path - - -@pytest.fixture(scope='module') -def g(): - # build the repo first if not exists yet - repo_path = os.path.join(root_path, 'repos/test_feature_branch') - script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/test_feature_branch') - if not os.path.isdir(repo_path): - cmd = '{} {}'.format(script_path, test_src_path) - subprocess.call(cmd, shell=True) - - g = CallCommitGraph(repo_path) - g.process(from_beginning=True, verbose=True, into_branches=True) - return g - - -def test_callcommitgraph(g): - history_truth = { - 'J': {'count': 12, 'display': 14}, - 'I': {'add': 5, 'append': 35}, - 'E': {}, - 'G': {'str_equals': 1, 'str_replace': 26}, - 'D': {}, - 'H': {'add': 16, 'append': 12}, - 'F': {}, - 'A': {'str_append': 7, 'str_len': 6}, - 'K': {'display': 5}, - 'C': {'str_append_chr': 34, 'str_equals': 1}, - 'B': {'str_append': 9, 'str_append_chr': 7, 'str_equals': 11} - } - - for commit in g.repo.iter_commits(): - assert(g.history[commit.hexsha] == - 
history_truth[commit.message.strip()]) - - edges_truth = [ - ('append', 'free'), - ('display', 'printf'), - ('str_replace', 'str_append_chr'), - ('str_replace', 'str_equals'), - ('str_replace', 'str_len'), - ('str_replace', 'str_append'), - ('str_append_chr', 'str_append_chr'), - ('str_append_chr', 'str_equals'), - ('str_append_chr', 'str_len'), - ('str_append_chr', 'str_append'), - ('add', 'malloc') - ] - assert(set(g.G.edges()) == set(edges_truth)) - - -def test_inverse_diff(): - # view parsing ground truth here - # https://github.com/basicthinker/Sexain-MemController/commit/f050c6f6dd4b1d3626574b0d23bb41125f7b75ca - adds_dels = ( - [[7, 31], [27, 3], [44, 1], [50, 2], [70, 1], [77, 2], [99, 2]], - [[32, 44], [56, 70]] - ) - inv_truth = ( - [[65, 13], [79, 15]], - [[8, 38], [59, 61], [66, 66], [73, 74], [80, 80], [88, 89], [112, 113]] - ) - - inv_result = _inverse_diff_result(*adds_dels) - assert(inv_truth == inv_result) - - -def assert_graphs_equal(G1, G2): - assert(set(G1.nodes()) == set(G2.nodes())) - assert(set(G1.edges()) == set(G2.edges())) - for n in G1: - assert(G1.node[n] == G2.node[n]) - - -def assert_callcommitgraphs_equal(g1, g2): - assert_graphs_equal(g1.G, g2.G) - assert(g1.history == g2.history) - assert(g1.exts == g2.exts) - - -def test_process_interface(g): - """test various ways to invoke process function""" - repo_path = os.path.join(root_path, 'repos/test_feature_branch') - g1 = CallCommitGraph(repo_path) - # A B - g1.process(from_beginning=True, into_branches=True, num_commits=2) - # C D - g1.process(from_last_processed=True, into_branches=True, num_commits=2) - # E F K - g1.process(from_last_processed=True, into_branches=True, num_commits=3) - # should see "The range specified is empty, terminated." - g1.process(from_last_processed=True, into_branches=True, num_commits=1) - assert_callcommitgraphs_equal(g1, g) - - g2 = CallCommitGraph(repo_path) - # should see "No history exists yet, terminated." 
- g2.process(from_last_processed=True, into_branches=True, num_commits=1) - # A B C - g2.process(from_beginning=True, into_branches=True, num_commits=3) - # D E F - g2.process(from_beginning=True, - into_branches=True, - end_commit_sha=g.commits[5].hexsha) - # K - g2.process(from_beginning=True, - into_branches=True, - end_commit_sha=g.commits[6].hexsha) - assert_callcommitgraphs_equal(g2, g) - - -def test_save(g): - fname = "test_save_g.pickle" - g.save(fname) - with open(fname, 'rb') as f: - gp = pickle.load(f) - os.remove(fname) - assert_callcommitgraphs_equal(g, gp) diff --git a/test/test_graphs/test_detect_change.py b/test/test_graphs/test_detect_change.py deleted file mode 100644 index 595d536f24f..00000000000 --- a/test/test_graphs/test_detect_change.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -from persper.graphs.patch_parser import PatchParser -from persper.graphs.detect_change import get_changed_functions -from persper.graphs.call_graph.cpp import get_func_ranges_cpp -from persper.graphs.srcml import transform_src_to_tree - -dir_path = os.path.dirname(os.path.abspath(__file__)) - - -def test_detect_change(): - parser = PatchParser() - - # view parsing ground truth here - # https://github.com/basicthinker/Sexain-MemController/commit/f050c6f6dd4b1d3626574b0d23bb41125f7b75ca - parsing_truth = ( - [[7, 31], [27, 3], [44, 1], [50, 2], [70, 1], [77, 2], [99, 2]], - [[32, 44], [56, 70]] - ) - - # view function ranges ground truth here - # https://github.com/basicthinker/Sexain-MemController/blob/5b8886d9da3bb07140bfb1ff2d2b215b2dff584b/migration_controller.cc - func_ranges_truth = ( - ['MigrationController::InputBlocks', - 'MigrationController::ExtractNVMPage', - 'MigrationController::ExtractDRAMPage', - 'MigrationController::Clear'], - [[8, 28], [30, 52], [54, 79], [81, 100]] - ) - - changed_result = { - 'MigrationController::Clear': 2, - 'MigrationController::ExtractDRAMPage': 18, - 'MigrationController::ExtractNVMPage': 16, - 
'MigrationController::InputBlocks': 3 - } - - with open(os.path.join(dir_path, 'example.patch'), 'r') as f: - example_patch = f.read() - parsing_result = parser.parse(example_patch) - assert(parsing_result == parsing_truth) - - with open(os.path.join(dir_path, 'example.cc'), 'r') as f: - root = transform_src_to_tree(f.read(), ext='.cc') - func_ranges_result = get_func_ranges_cpp(root) - assert(func_ranges_result == func_ranges_truth) - - assert(changed_result == get_changed_functions( - *func_ranges_result, *parsing_result)) - -def test_patch_parser(): - parser = PatchParser() - - patch2_truth = ( - [[0, 6]], - [] - ) - with open(os.path.join(dir_path, 'example2.patch'), 'r') as f: - example2_patch = f.read() - parsing_result = parser.parse(example2_patch) - assert(parsing_result == patch2_truth) - - # view patch3_truth here - # https://github.com/UltimateBeaver/test_feature_branch/commit/caaac10f604ea7ac759c2147df8fb2b588ee2a27 - patch3_truth = ( - [[10, 4], [12, 1], [14, 1], [17, 13]], - [[9, 10], [12, 12], [14, 14]] - ) - with open(os.path.join(dir_path, 'example3.patch'), 'r') as f: - example3_patch = f.read() - parsing_result = parser.parse(example3_patch) - assert(parsing_result == patch3_truth) - - # view patch4_truth here - # https://github.com/UltimateBeaver/test_feature_branch/commit/364d5cc49aeb2e354da458924ce84c0ab731ac77 - patch4_truth = ( - [[0, 27]], - [] - ) - with open(os.path.join(dir_path, 'example4.patch'), 'r') as f: - example4_patch = f.read() - parsing_result = parser.parse(example4_patch) - assert(parsing_result == patch4_truth) - - - - - diff --git a/test/test_graphs/test_inverse_diff.py b/test/test_graphs/test_inverse_diff.py deleted file mode 100644 index e0a390ad7ef..00000000000 --- a/test/test_graphs/test_inverse_diff.py +++ /dev/null @@ -1,18 +0,0 @@ -import pytest -from persper.graphs.inverse_diff import inverse_diff - - -def test_inverse_diff(): - # view parsing ground truth here - # 
https://github.com/basicthinker/Sexain-MemController/commit/f050c6f6dd4b1d3626574b0d23bb41125f7b75ca - adds_dels = ( - [[7, 31], [27, 3], [44, 1], [50, 2], [70, 1], [77, 2], [99, 2]], - [[32, 44], [56, 70]] - ) - inv_truth = ( - [[65, 13], [79, 15]], - [[8, 38], [59, 61], [66, 66], [73, 74], [80, 80], [88, 89], [112, 113]] - ) - - inv_result = inverse_diff(*adds_dels) - assert(inv_truth == inv_result) diff --git a/test/test_graphs/test_iterator.py b/test/test_graphs/test_iterator.py deleted file mode 100644 index 418572122f5..00000000000 --- a/test/test_graphs/test_iterator.py +++ /dev/null @@ -1,65 +0,0 @@ -import os -import pytest -import pickle -import subprocess -from persper.graphs.iterator import RepoIterator -from persper.util.path import root_path - - -def serialized_messages(commits): - return ' '.join([c.message.strip() for c in commits]) - - -@pytest.fixture(scope='module') -def ri(): - # build the repo first if not exists yet - repo_path = os.path.join(root_path, 'repos/test_processor') - script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/test_processor') - if not os.path.isdir(repo_path): - cmd = '{} {}'.format(script_path, test_src_path) - subprocess.call(cmd, shell=True) - - repo_path = os.path.join(root_path, 'repos/test_processor') - ri = RepoIterator(repo_path) - return ri - - -def test_iterator(ri): - commits, branch_commits = ri.iter(from_beginning=True, into_branches=True) - # from A to L - # use `git log --graph` to view ground truth - assert(len(ri.visited) == 12) - assert(len(commits) == 4) - assert(len(branch_commits) == 8) - assert(serialized_messages(commits) == 'D C B A') - assert(serialized_messages(branch_commits) == 'G F E J I H L K') - - -def test_continue_iter(ri): - commits, branch_commits = ri.iter( - from_beginning=True, num_commits=2, into_branches=True) - assert(serialized_messages(commits) == 'B A') - assert(serialized_messages(branch_commits) == '') - 
commits2, branch_commits2 = ri.iter( - continue_iter=True, num_commits=2, into_branches=True) - assert(serialized_messages(commits2) == 'D C') - assert(serialized_messages(branch_commits2) == 'G F E J I H L K') - - -def test_rev(ri): - commits, branch_commits = ri.iter(rev='C', into_branches=True) - assert(serialized_messages(commits) == 'C B A') - assert(serialized_messages(branch_commits) == '') - commits2, branch_commits2 = ri.iter( - continue_iter=True, end_commit_sha='D', into_branches=True) - assert(serialized_messages(commits2) == 'D') - assert(serialized_messages(branch_commits2) == 'G F E J I H L K') - - -def test_iter_twice(ri): - commits, branch_commits = ri.iter(from_beginning=True, into_branches=True) - commits2, branch_commits2 = ri.iter( - from_beginning=True, into_branches=True) - assert(commits == commits2) - assert(branch_commits == branch_commits2) diff --git a/test/test_graphs/test_map_id.py b/test/test_graphs/test_map_id.py deleted file mode 100644 index be93dae433a..00000000000 --- a/test/test_graphs/test_map_id.py +++ /dev/null @@ -1,46 +0,0 @@ -import os -import subprocess -from persper.graphs.graph_server import JS_FILENAME_REGEXES -from persper.graphs.graph_server_http import GraphServerHttp -from persper.graphs.analyzer import Analyzer -from persper.util.path import root_path - - -def test_map_id(): - repo_path = os.path.join(root_path, 'repos/js_test_repo') - script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/js_test_repo') - if not os.path.isdir(repo_path): - cmd = '{} {}'.format(script_path, test_src_path) - subprocess.call(cmd, shell=True) - - server_addr = 'http://localhost:3000' - az = Analyzer(repo_path, GraphServerHttp(server_addr, JS_FILENAME_REGEXES)) - - az.ordered_shas = ['c1', 'c2', 'c3', 'c4', 'c5', 'c6', 'c7'] - - az.id_map = { - 'c1': {'A': 'B'}, - 'c2': {'B': 'C', 'E': 'F'}, - 'c3': {'C': 'D', 'F': 'G'}, - 'c4': {'G': 'H'}, - 'c5': {'D': 'I', 'J': 
'K'}, - 'c6': {'I': 'B', 'H': 'E'}, # make two cycles - 'c7': {'B': 'L'} # remove a cycle - } - - final_map_truth = { - 'A': 'L', - 'B': 'L', - 'C': 'L', - 'D': 'L', - 'I': 'L', - 'E': 'E', - 'F': 'E', - 'G': 'E', - 'H': 'E', - 'J': 'K' - } - - final_map = az.aggregate_id_map() - assert(final_map_truth == final_map) diff --git a/test/test_graphs/test_processor.py b/test/test_graphs/test_processor.py deleted file mode 100644 index 8260656e2bb..00000000000 --- a/test/test_graphs/test_processor.py +++ /dev/null @@ -1,26 +0,0 @@ -import os -import pytest -import subprocess -from persper.graphs.processor import Processor -from persper.util.path import root_path - - -def setup_module(module): - # build the repo first if not exists yet - repo_path = os.path.join(root_path, 'repos/test_processor') - script_path = os.path.join(root_path, 'tools/repo_creater/create_repo.py') - test_src_path = os.path.join(root_path, 'test/test_processor') - if not os.path.isdir(repo_path): - cmd = '{} {}'.format(script_path, test_src_path) - subprocess.call(cmd, shell=True) - - -def test_processor(capsys): - repo_path = os.path.join(root_path, 'repos/test_processor') - p = Processor(repo_path) - p.process(from_beginning=True, into_branches=True) - # from A to L - assert(len(p.visited) == 12) - out, _ = capsys.readouterr() - print(out) - assert("Commit No.8 Branch No.3" in out) From 1cb4580f6585b2e74064759bf63fef7b81904b4a Mon Sep 17 00:00:00 2001 From: xinyan Date: Sat, 2 Mar 2019 22:23:17 +0800 Subject: [PATCH 53/84] Separate added and removed LOC in node history. Fixed baseline. ccls should be able to cope with the difference between the linefeed in didOpen request and linefeed in the actual file, or this can cause trouble on Windows. 
--- .../analytics/lsp_graph_server/__init__.py | 24 +- .../baseline/cpp_test_repo/A.g.json | 328 ++++++++++----- .../baseline/cpp_test_repo/B.g.json | 333 ++++++++++----- .../baseline/cpp_test_repo/C.g.json | 358 +++++++++++----- .../baseline/cpp_test_repo/D.g.json | 385 +++++++++++++----- .../baseline/feature_branch/A.g.json | 12 +- .../baseline/feature_branch/B.g.json | 29 +- .../baseline/feature_branch/C.g.json | 45 +- .../baseline/feature_branch/D.g.json | 56 ++- .../baseline/feature_branch/E.g.json | 93 ++++- .../baseline/feature_branch/F.g.json | 105 ++++- .../baseline/feature_branch/G.g.json | 56 ++- .../baseline/feature_branch/H.g.json | 78 +++- .../baseline/feature_branch/I.g.json | 93 ++++- .../baseline/feature_branch/J.g.json | 105 ++++- .../baseline/feature_branch/K.g.json | 110 +++-- .../feature_branch_first_parent/A.g.json | 12 +- .../feature_branch_first_parent/B.g.json | 29 +- .../feature_branch_first_parent/C.g.json | 45 +- .../feature_branch_first_parent/D.g.json | 51 ++- .../feature_branch_first_parent/E.g.json | 63 ++- .../feature_branch_first_parent/F.g.json | 75 +++- .../feature_branch_first_parent/K.g.json | 80 +++- 23 files changed, 1924 insertions(+), 641 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index 61421763f5a..d5076dd9441 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -99,7 +99,7 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, if newPath is None: # The file has been deleted # We need to scan it before it's gone, instead of in end_commit - self._markWholeDocumentAsChanged(await self._callGraphBuilder.getTokenizedDocument(oldPath)) + self._markWholeDocumentAsChanged(await self._callGraphBuilder.getTokenizedDocument(oldPath), True) elif oldPath is None: # The file has been added self._stashedPatches.append((oldPath, newPath, None, None)) @@ -114,7 +114,7 @@ 
async def update_graph(self, old_filename: str, old_src: str, new_filename: str, for i in range(start - 1, end): scope = oldDoc.scopeAt(i, 0) if scope: - self._safeUpdateNodeHistory(scope.name, 1) + self._safeUpdateNodeHistory(scope.name, 0, 1) self._stashedPatches.append((oldPath, newPath, added, None)) # perform file operations @@ -128,19 +128,21 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, self._invalidatedFiles.add(newPath) self._lastFileWrittenTime = datetime.now() - def _safeUpdateNodeHistory(self, name: str, changeOfLines: int): + def _safeUpdateNodeHistory(self, name: str, addedLines: int, removedLines: int): if name not in self._ccgraph.nodes(): self._ccgraph.add_node(name) - self._ccgraph.update_node_history(name, changeOfLines) + self._ccgraph.update_node_history(name, addedLines, removedLines) - def _markWholeDocumentAsChanged(self, doc: TokenizedDocument): + def _markWholeDocumentAsChanged(self, doc: TokenizedDocument, markAsRemoved: bool): + # markAsRemoved: True: document has been deleted + # False: document has been added parentScopes = [] # print("_markWholeDocumentAsChanged: ", doc.fileName) for scope in doc.scopes: while parentScopes and parentScopes[-1][0].endPos <= scope.startPos: # scope is out of parentScope, then the changed line count for parentScope is decided s, c = parentScopes.pop() - self._safeUpdateNodeHistory(s.name, c) + self._safeUpdateNodeHistory(s.name, c, 0) thisScopeLines = scope.endPos.line - scope.startPos.line + 1 if parentScopes: # Subtract LOC from innermost scope to eliminate dups @@ -161,7 +163,10 @@ def _markWholeDocumentAsChanged(self, doc: TokenizedDocument): parentScopes.append([scope, thisScopeLines]) while parentScopes: s, c = parentScopes.pop() - self._safeUpdateNodeHistory(s.name, c) + if markAsRemoved: + self._safeUpdateNodeHistory(s.name, 0, c) + else: + self._safeUpdateNodeHistory(s.name, c, 0) async def end_commit(self, hexsha): # update vetices & edges @@ -178,17 
+183,18 @@ async def end_commit(self, hexsha): newDoc: TokenizedDocument = await self._callGraphBuilder.getTokenizedDocument(newPath) if not oldPath: # file has been added - self._markWholeDocumentAsChanged(newDoc) + self._markWholeDocumentAsChanged(newDoc, False) else: assert added for start, end in added: for i in range(start - 1, end): scope = newDoc.scopeAt(i, 0) if scope: - self._safeUpdateNodeHistory(scope.name, 1) + self._safeUpdateNodeHistory(scope.name, 1, 0) self._stashedPatches.clear() # ensure the files in the next commit has a different timestamp from this commit. + if datetime.now() - self._lastFileWrittenTime < timedelta(seconds=1): await asyncio.sleep(1) diff --git a/test/test_analytics/baseline/cpp_test_repo/A.g.json b/test/test_analytics/baseline/cpp_test_repo/A.g.json index 019e2272079..e91a766ab1f 100644 --- a/test/test_analytics/baseline/cpp_test_repo/A.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/A.g.json @@ -12,14 +12,6 @@ "addedBy": "A", "weight": null }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "A", "weight": null @@ -28,34 +20,6 @@ "addedBy": "A", "weight": null }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": 
"A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "A", "weight": null @@ -64,10 +28,6 @@ "addedBy": "A", "weight": null }, - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "A", - "weight": null - }, "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { "addedBy": "A", "weight": null @@ -92,14 +52,6 @@ "addedBy": "A", "weight": null }, - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "A", - "weight": null - }, - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "A", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "A", "weight": null @@ -108,10 +60,6 @@ "addedBy": "A", "weight": null }, - "void 
ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "A", "weight": null @@ -119,278 +67,462 @@ }, "nodes": { "ANSI_COLOR_BLUE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_BRIGHT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_CYAN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_GREEN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_MAGENTA": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RED": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RESET": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_YELLOW": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "files": [], "history": { - "A": 5 + "A": { + "adds": 5, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "files": [], "history": { - "A": 25 + "A": { + "adds": 25, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { + "files": [], "history": 
{ - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "files": [], "history": { - "A": 17 + "A": { + "adds": 17, + "dels": 0 + } }, "size": null }, "TDest safe_cast(TSrc obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM_DEFAULT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RE_TRACE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "files": [], "history": { - "A": 16 + "A": 
{ + "adds": 16, + "dels": 0 + } }, "size": null }, "bool Confirm(const std::string &prompt)": { + "files": [], "history": { - "A": 18 + "A": { + "adds": 18, + "dels": 0 + } }, "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "files": [], "history": { - "A": 30 + "A": { + "adds": 30, + "dels": 0 + } }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf()": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "enum class StringComparison : int {}": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "inline std::string to_string(const std::pair &value)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int main(int argc, char *argv[])": { + "files": [], "history": { - "A": 7 
+ "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "std::string StreamStatusToString(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { + "files": [], "history": { - "A": 23 + "A": { + "adds": 23, + "dels": 0 + } }, "size": null }, "void ReportException(const std::exception &ex, int level)": { + "files": [], "history": { - "A": 21 + "A": { + "adds": 21, + "dels": 0 + } }, "size": null }, "void ValidateStream(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/cpp_test_repo/B.g.json b/test/test_analytics/baseline/cpp_test_repo/B.g.json index f504ab4e582..eb9000faf62 100644 --- a/test/test_analytics/baseline/cpp_test_repo/B.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/B.g.json @@ -12,14 +12,6 @@ "addedBy": "A", "weight": null }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "A", "weight": null @@ -28,34 +20,6 @@ "addedBy": "A", "weight": null }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader 
&operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "A", - "weight": null - }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "A", "weight": null @@ -64,10 +28,6 @@ "addedBy": "A", "weight": null }, - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "A", - "weight": null - }, "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { "addedBy": "B", "weight": null @@ -92,14 +52,6 @@ "addedBy": "A", "weight": null }, - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "A", - "weight": null - }, - "int ConfigurationParser::GetInt(const 
std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "A", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "B", "weight": null @@ -120,10 +72,6 @@ "addedBy": "B", "weight": null }, - "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "A", - "weight": null - }, "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "B", "weight": null @@ -131,279 +79,466 @@ }, "nodes": { "ANSI_COLOR_BLUE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_BRIGHT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_CYAN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_GREEN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_MAGENTA": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RED": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RESET": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_YELLOW": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "files": [], "history": { - "A": 5 + "A": { + "adds": 5, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "RowReader 
&operator>>(RowReader &reader, bool &rhs)": { + "files": [], "history": { - "A": 25 + "A": { + "adds": 25, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "files": [], "history": { - "A": 17 + "A": { + "adds": 17, + "dels": 0 + } }, "size": null }, "TDest safe_cast(TSrc obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM_DEFAULT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RE_TRACE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair 
range)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "files": [], "history": { - "A": 16 + "A": { + "adds": 16, + "dels": 0 + } }, "size": null }, "bool Confirm(const std::string &prompt)": { + "files": [], "history": { - "A": 18 + "A": { + "adds": 18, + "dels": 0 + } }, "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "files": [], "history": { - "A": 30 + "A": { + "adds": 30, + "dels": 0 + } }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf()": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "enum class StringComparison : int {}": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "inline std::string 
to_string(const std::pair &value)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int main(int argc, char *argv[])": { + "files": [], "history": { - "A": 7, - "B": 4 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 4, + "dels": 0 + } }, "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "std::string StreamStatusToString(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { + "files": [], "history": { - "A": 23 + "A": { + "adds": 23, + "dels": 0 + } }, "size": null }, "void ReportException(const std::exception &ex, int level)": { + "files": [], "history": { - "A": 21 + "A": { + "adds": 21, + "dels": 0 + } }, "size": null }, "void ValidateStream(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/cpp_test_repo/C.g.json b/test/test_analytics/baseline/cpp_test_repo/C.g.json index 88834d24b6f..092ff75892a 100644 --- a/test/test_analytics/baseline/cpp_test_repo/C.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/C.g.json @@ -12,14 +12,6 @@ "addedBy": "C", "weight": null }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, 
bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "C", - "weight": null - }, "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "C", "weight": null @@ -28,34 +20,6 @@ "addedBy": "C", "weight": null }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "C", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "C", - "weight": null - }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "C", "weight": null @@ -64,10 +28,6 @@ "addedBy": "C", "weight": null }, - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "C", - "weight": null - }, "bool Confirm(const std::string 
&prompt)|->|bool Confirm(const std::string &prompt)": { "addedBy": "C", "weight": null @@ -92,14 +52,6 @@ "addedBy": "C", "weight": null }, - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "C", - "weight": null - }, - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "C", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "C", "weight": null @@ -120,10 +72,6 @@ "addedBy": "B", "weight": null }, - "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "C", - "weight": null - }, "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "C", "weight": null @@ -131,284 +79,486 @@ }, "nodes": { "ANSI_COLOR_BLUE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_BRIGHT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_CYAN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_GREEN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_MAGENTA": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RED": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RESET": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_YELLOW": { + "files": [], "history": 
{ - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "files": [], "history": { - "A": 5 + "A": { + "adds": 5, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "files": [], "history": { - "A": 25, - "C": 4 + "A": { + "adds": 25, + "dels": 0 + }, + "C": { + "adds": 2, + "dels": 2 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "files": [], "history": { - "A": 17 + "A": { + "adds": 17, + "dels": 0 + } }, "size": null }, "TDest safe_cast(TSrc obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { + "files": [], "history": { 
- "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM_DEFAULT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RE_TRACE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "files": [], "history": { - "A": 16, - "C": 4 + "A": { + "adds": 16, + "dels": 0 + }, + "C": { + "adds": 2, + "dels": 2 + } }, "size": null }, "bool Confirm(const std::string &prompt)": { + "files": [], "history": { - "A": 18 + "A": { + "adds": 18, + "dels": 0 + } }, "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "files": [], "history": { - "A": 30, - "C": 2 + "A": { + "adds": 30, + "dels": 0 + }, + "C": { + "adds": 1, + "dels": 1 + } }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf()": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { + "files": [], "history": { - 
"A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "enum class StringComparison : int {}": { + "files": [], "history": { - "A": 6, - "C": 2 + "A": { + "adds": 6, + "dels": 0 + }, + "C": { + "adds": 1, + "dels": 1 + } }, "size": null }, "inline std::string to_string(const std::pair &value)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int main(int argc, char *argv[])": { + "files": [], "history": { - "A": 7, - "B": 4, - "C": 4 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 4, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 4 + } }, "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "std::string StreamStatusToString(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { + "files": [], "history": { - "A": 23 + "A": { + "adds": 23, + "dels": 0 + } }, "size": null }, "void ReportException(const std::exception &ex, int level)": { + "files": [], "history": { - "A": 21 + "A": { + "adds": 21, + "dels": 0 + } }, "size": null }, "void 
ValidateStream(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/cpp_test_repo/D.g.json b/test/test_analytics/baseline/cpp_test_repo/D.g.json index 3e271ea882d..283418a6ce5 100644 --- a/test/test_analytics/baseline/cpp_test_repo/D.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/D.g.json @@ -12,14 +12,6 @@ "addedBy": "D", "weight": null }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, bool &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "D", - "weight": null - }, "RowReader &operator>>(RowReader &reader, bool &rhs)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "D", "weight": null @@ -28,34 +20,6 @@ "addedBy": "D", "weight": null }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, double &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, float &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, int &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, long &rhs)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "D", - "weight": null - }, - "RowReader &operator>>(RowReader &reader, 
long &rhs)|->|RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "addedBy": "D", - "weight": null - }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "addedBy": "D", "weight": null @@ -64,10 +28,6 @@ "addedBy": "D", "weight": null }, - "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "D", - "weight": null - }, "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { "addedBy": "D", "weight": null @@ -92,14 +52,6 @@ "addedBy": "D", "weight": null }, - "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "D", - "weight": null - }, - "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const|->|std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "addedBy": "D", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "C", "weight": null @@ -120,10 +72,6 @@ "addedBy": "B", "weight": null }, - "void ConfigurationParser::Load(std::istream &inputStream)|->|RowReader &operator>>(RowReader &reader, int &rhs)": { - "addedBy": "D", - "weight": null - }, "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "D", "weight": null @@ -131,289 +79,506 @@ }, "nodes": { "ANSI_COLOR_BLUE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_BRIGHT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 
0 + } }, "size": null }, "ANSI_COLOR_CYAN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_GREEN": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_MAGENTA": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RED": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_RESET": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ANSI_COLOR_YELLOW": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { + "files": [], "history": { - "A": 5 + "A": { + "adds": 5, + "dels": 0 + } }, "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { + "files": [], "history": { - "A": 25, - "C": 4, - "D": 4 + "A": { + "adds": 25, + "dels": 0 + }, + "C": { + "adds": 2, + "dels": 2 + }, + "D": { + "adds": 2, + "dels": 2 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { + "files": [], 
"history": { - "A": 17 + "A": { + "adds": 17, + "dels": 0 + } }, "size": null }, "TDest safe_cast(TSrc obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "TStream OpenAndValidate(const TPath arg1)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_DECLARE_ENUM_DEFAULT": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RE_TRACE": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { + "files": [], "history": { - "A": 16, - "C": 4, - "D": 4 + "A": { + "adds": 16, + "dels": 0 + }, + "C": { + "adds": 2, + "dels": 2 + }, + "D": { + "adds": 2, + "dels": 2 + } }, "size": null }, "bool Confirm(const std::string &prompt)": { + "files": [], "history": { - "A": 18 + "A": { + "adds": 18, + "dels": 0 + } }, "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { + "files": [], "history": { - "A": 30, - "C": 2, - "D": 2 + 
"A": { + "adds": 30, + "dels": 0 + }, + "C": { + "adds": 1, + "dels": 1 + }, + "D": { + "adds": 1, + "dels": 1 + } }, "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf()": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "const char *FriendlyNameOf(const type_info &type)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "enum class StringComparison : int {}": { + "files": [], "history": { - "A": 6, - "C": 2, - "D": 2 + "A": { + "adds": 6, + "dels": 0 + }, + "C": { + "adds": 1, + "dels": 1 + }, + "D": { + "adds": 1, + "dels": 1 + } }, "size": null }, "inline std::string to_string(const std::pair &value)": { + "files": [], "history": { - "A": 4 + "A": { + "adds": 4, + "dels": 0 + } }, "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { + "files": [], "history": { - "A": 12 + "A": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int main(int argc, char *argv[])": { - "history": { - "A": 7, - "B": 4, - "C": 4, - "D": 7 + "files": [], + "history": { + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 4, + "dels": 0 + }, + "C": { + "adds": 0, + 
"dels": 4 + }, + "D": { + "adds": 4, + "dels": 3 + } }, "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "std::string StreamStatusToString(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { + "files": [], "history": { - "A": 23 + "A": { + "adds": 23, + "dels": 0 + } }, "size": null }, "void ReportException(const std::exception &ex, int level)": { + "files": [], "history": { - "A": 21 + "A": { + "adds": 21, + "dels": 0 + } }, "size": null }, "void ValidateStream(const TStream &stream)": { + "files": [], "history": { - "A": 1 + "A": { + "adds": 1, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/A.g.json b/test/test_analytics/baseline/feature_branch/A.g.json index 7833183bead..ddabd6b9c37 100644 --- a/test/test_analytics/baseline/feature_branch/A.g.json +++ b/test/test_analytics/baseline/feature_branch/A.g.json @@ -2,14 +2,22 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7 + "A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/B.g.json b/test/test_analytics/baseline/feature_branch/B.g.json index d6849e2667e..91024320909 100644 --- a/test/test_analytics/baseline/feature_branch/B.g.json +++ b/test/test_analytics/baseline/feature_branch/B.g.json @@ -2,27 +2,46 @@ "edges": {}, "nodes": { "char *str_append(char *string, char 
*append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3 + "B": { + "adds": 3, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11 + "B": { + "adds": 11, + "dels": 0 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/C.g.json b/test/test_analytics/baseline/feature_branch/C.g.json index 73c976c5550..75e33885dc3 100644 --- a/test/test_analytics/baseline/feature_branch/C.g.json +++ b/test/test_analytics/baseline/feature_branch/C.g.json @@ -11,35 +11,64 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/D.g.json b/test/test_analytics/baseline/feature_branch/D.g.json index dc709adda76..3ab44c0690f 100644 --- a/test/test_analytics/baseline/feature_branch/D.g.json +++ b/test/test_analytics/baseline/feature_branch/D.g.json @@ -23,42 +23,78 
@@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/E.g.json b/test/test_analytics/baseline/feature_branch/E.g.json index 421199f1fb2..a3d682842c0 100644 --- a/test/test_analytics/baseline/feature_branch/E.g.json +++ b/test/test_analytics/baseline/feature_branch/E.g.json @@ -20,10 +20,6 @@ "addedBy": "G", "weight": null }, - "void add(int num)|->|void append(int num)": { - "addedBy": "H", - "weight": null - }, "void insert(int num)|->|void add(int num)": { "addedBy": "H", "weight": null @@ -35,63 +31,120 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char 
*search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "H": 16, - "I": 5 + "H": { + "adds": 16, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 5 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "H": 12, - "I": 35 + "H": { + "adds": 12, + "dels": 0 + }, + "I": { + "adds": 26, + "dels": 9 + } }, "size": null }, "void insert(int num)": { + "files": [], "history": { - "H": 25, - "I": 25 + "H": { + "adds": 25, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 25 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/F.g.json b/test/test_analytics/baseline/feature_branch/F.g.json index a8ec1d2d5f5..e2f6a4e788f 100644 --- a/test/test_analytics/baseline/feature_branch/F.g.json +++ b/test/test_analytics/baseline/feature_branch/F.g.json @@ -28,10 +28,6 @@ "addedBy": "F", "weight": null }, - "void add(int num)|->|void append(int num)": { - "addedBy": "H", - "weight": null - }, "void insert(int num)|->|void add(int num)": { "addedBy": "H", "weight": null @@ -43,75 +39,140 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, 
+ "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int count()": { + "files": [], "history": { - "J": 12 + "J": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "H": 16, - "I": 5 + "H": { + "adds": 16, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 5 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "H": 12, - "I": 35 + "H": { + "adds": 12, + "dels": 0 + }, + "I": { + "adds": 26, + "dels": 9 + } }, "size": null }, "void display(struct node *r)": { + "files": [], "history": { - "J": 14 + "J": { + "adds": 14, + "dels": 0 + } }, "size": null }, "void insert(int num)": { + "files": [], "history": { - "H": 25, - "I": 25 + "H": { + "adds": 25, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 25 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/G.g.json b/test/test_analytics/baseline/feature_branch/G.g.json index c7f0ffddad3..4988009f0d2 100644 --- a/test/test_analytics/baseline/feature_branch/G.g.json +++ b/test/test_analytics/baseline/feature_branch/G.g.json @@ -23,42 +23,78 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, 
char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/H.g.json b/test/test_analytics/baseline/feature_branch/H.g.json index 0fdd9219bd8..2034c2f568b 100644 --- a/test/test_analytics/baseline/feature_branch/H.g.json +++ b/test/test_analytics/baseline/feature_branch/H.g.json @@ -20,10 +20,6 @@ "addedBy": "G", "weight": null }, - "void add(int num)|->|void append(int num)": { - "addedBy": "H", - "weight": null - }, "void insert(int num)|->|void add(int num)": { "addedBy": "H", "weight": null @@ -35,60 +31,108 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, 
"size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "H": 16 + "H": { + "adds": 16, + "dels": 0 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "H": 12 + "H": { + "adds": 12, + "dels": 0 + } }, "size": null }, "void insert(int num)": { + "files": [], "history": { - "H": 25 + "H": { + "adds": 25, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/I.g.json b/test/test_analytics/baseline/feature_branch/I.g.json index 421199f1fb2..a3d682842c0 100644 --- a/test/test_analytics/baseline/feature_branch/I.g.json +++ b/test/test_analytics/baseline/feature_branch/I.g.json @@ -20,10 +20,6 @@ "addedBy": "G", "weight": null }, - "void add(int num)|->|void append(int num)": { - "addedBy": "H", - "weight": null - }, "void insert(int num)|->|void add(int num)": { "addedBy": "H", "weight": null @@ -35,63 +31,120 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + 
"files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "H": 16, - "I": 5 + "H": { + "adds": 16, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 5 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "H": 12, - "I": 35 + "H": { + "adds": 12, + "dels": 0 + }, + "I": { + "adds": 26, + "dels": 9 + } }, "size": null }, "void insert(int num)": { + "files": [], "history": { - "H": 25, - "I": 25 + "H": { + "adds": 25, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 25 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/J.g.json b/test/test_analytics/baseline/feature_branch/J.g.json index 9937b4ebebf..a1b20e3761c 100644 --- a/test/test_analytics/baseline/feature_branch/J.g.json +++ b/test/test_analytics/baseline/feature_branch/J.g.json @@ -20,10 +20,6 @@ "addedBy": "G", "weight": null }, - "void add(int num)|->|void append(int num)": { - "addedBy": "H", - "weight": null - }, "void insert(int num)|->|void add(int num)": { "addedBy": "H", "weight": null @@ -35,75 +31,140 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int count()": { + "files": [], "history": { - "J": 12 + "J": { + "adds": 12, + 
"dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "H": 16, - "I": 5 + "H": { + "adds": 16, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 5 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "H": 12, - "I": 35 + "H": { + "adds": 12, + "dels": 0 + }, + "I": { + "adds": 26, + "dels": 9 + } }, "size": null }, "void display(struct node *r)": { + "files": [], "history": { - "J": 14 + "J": { + "adds": 14, + "dels": 0 + } }, "size": null }, "void insert(int num)": { + "files": [], "history": { - "H": 25, - "I": 25 + "H": { + "adds": 25, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 25 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch/K.g.json b/test/test_analytics/baseline/feature_branch/K.g.json index b3718df38be..a863b368ffd 100644 --- a/test/test_analytics/baseline/feature_branch/K.g.json +++ b/test/test_analytics/baseline/feature_branch/K.g.json @@ -28,10 +28,6 @@ "addedBy": "F", "weight": null }, - "void add(int num)|->|void append(int num)": { - "addedBy": "H", - "weight": null - }, "void insert(int num)|->|void add(int num)": { "addedBy": "H", "weight": null @@ -43,76 +39,144 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + 
"adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "G": 26 + "G": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int count()": { + "files": [], "history": { - "J": 12 + "J": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1, - "G": 2 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + }, + "G": { + "adds": 1, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "H": 16, - "I": 5 + "H": { + "adds": 16, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 5 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "H": 12, - "I": 35 + "H": { + "adds": 12, + "dels": 0 + }, + "I": { + "adds": 26, + "dels": 9 + } }, "size": null }, "void display(struct node *r)": { + "files": [], "history": { - "J": 14, - "K": 5 + "J": { + "adds": 14, + "dels": 0 + }, + "K": { + "adds": 0, + "dels": 5 + } }, "size": null }, "void insert(int num)": { + "files": [], "history": { - "H": 25, - "I": 25 + "H": { + "adds": 25, + "dels": 0 + }, + "I": { + "adds": 0, + "dels": 25 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/A.g.json b/test/test_analytics/baseline/feature_branch_first_parent/A.g.json index 7833183bead..ddabd6b9c37 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/A.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/A.g.json @@ -2,14 +2,22 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7 + 
"A": { + "adds": 7, + "dels": 0 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/B.g.json b/test/test_analytics/baseline/feature_branch_first_parent/B.g.json index d6849e2667e..91024320909 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/B.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/B.g.json @@ -2,27 +2,46 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3 + "B": { + "adds": 3, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11 + "B": { + "adds": 11, + "dels": 0 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json index 73c976c5550..75e33885dc3 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json @@ -11,35 +11,64 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + 
"dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json index 8fa297dd82e..1835ec023d1 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json @@ -23,41 +23,74 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "D": 26 + "D": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json index 5a43da6ea80..5d0ebdb01d6 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json +++ 
b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json @@ -23,53 +23,94 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "D": 26 + "D": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "E": 11 + "E": { + "adds": 11, + "dels": 0 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "E": 29 + "E": { + "adds": 29, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json index 547dea94605..18cafb83055 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json @@ -23,65 +23,114 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + 
"adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": [], "history": { - "D": 26 + "D": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int count()": { + "files": [], "history": { - "F": 12 + "F": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "E": 11 + "E": { + "adds": 11, + "dels": 0 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "E": 29 + "E": { + "adds": 29, + "dels": 0 + } }, "size": null }, "void display(struct node *r)": { + "files": [], "history": { - "F": 14 + "F": { + "adds": 14, + "dels": 0 + } }, "size": null } diff --git a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json index a4c6c6aebbb..c47b146aec0 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json @@ -23,66 +23,118 @@ }, "nodes": { "char *str_append(char *string, char *append)": { + "files": [], "history": { - "A": 7, - "B": 3 + "A": { + "adds": 7, + "dels": 0 + }, + "B": { + "adds": 0, + "dels": 3 + } }, "size": null }, "char *str_append_chr(char *string, char append)": { + "files": [], "history": { - "B": 3, - "C": 34 + "B": { + "adds": 3, + "dels": 0 + }, + "C": { + "adds": 30, + "dels": 4 + } }, "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { + "files": 
[], "history": { - "D": 26 + "D": { + "adds": 26, + "dels": 0 + } }, "size": null }, "int count()": { + "files": [], "history": { - "F": 12 + "F": { + "adds": 12, + "dels": 0 + } }, "size": null }, "int str_equals()": { + "files": [], "history": { - "C": 1 + "C": { + "adds": 1, + "dels": 0 + } }, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { + "files": [], "history": { - "B": 11, - "C": 1 + "B": { + "adds": 11, + "dels": 0 + }, + "C": { + "adds": 0, + "dels": 1 + } }, "size": null }, "int str_len(char *string)": { + "files": [], "history": { - "A": 6 + "A": { + "adds": 6, + "dels": 0 + } }, "size": null }, "void add(int num)": { + "files": [], "history": { - "E": 11 + "E": { + "adds": 11, + "dels": 0 + } }, "size": null }, "void append(int num)": { + "files": [], "history": { - "E": 29 + "E": { + "adds": 29, + "dels": 0 + } }, "size": null }, "void display(struct node *r)": { + "files": [], "history": { - "F": 14, - "K": 5 + "F": { + "adds": 14, + "dels": 0 + }, + "K": { + "adds": 0, + "dels": 5 + } }, "size": null } From f0ba65081d2b0ea89c609e3875c284702a7b188e Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 3 Mar 2019 22:21:35 +0800 Subject: [PATCH 54/84] Mark all the scopes on the line for change lines. 
--- persper/analytics/lsp_graph_server/__init__.py | 8 ++++---- .../analytics/lsp_graph_server/callgraph/builder.py | 13 +++++++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index d5076dd9441..fad937738d7 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -112,8 +112,8 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, # start, end are inclusive, 1-based for start, end in removed: for i in range(start - 1, end): - scope = oldDoc.scopeAt(i, 0) - if scope: + # print("Removed L", i + 1, list((s.name, s.startPos, s.endPos) for s in oldDoc.scopesOnLine(i))) + for scope in oldDoc.scopesOnLine(i): self._safeUpdateNodeHistory(scope.name, 0, 1) self._stashedPatches.append((oldPath, newPath, added, None)) @@ -188,8 +188,8 @@ async def end_commit(self, hexsha): assert added for start, end in added: for i in range(start - 1, end): - scope = newDoc.scopeAt(i, 0) - if scope: + # print("Added L", i + 1, list((s.name, s.startPos, s.endPos) for s in newDoc.scopesOnLine(i))) + for scope in newDoc.scopesOnLine(i): self._safeUpdateNodeHistory(scope.name, 1, 0) self._stashedPatches.clear() diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index f790870bdf8..b54bd46bec6 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -135,6 +135,19 @@ def tokenAt(self, line: int, character: int) -> CallGraphNode: return tokenM return None + def scopesOnLine(self, line: int) -> List[CallGraphScope]: + """ + Gets all the instances of CallGraphScope from the specified 0-base line number. 
+ """ + scopes = [] + for scope in self._scopes: + # This is inefficient (yet correct) + if scope.startPos.line > line: + break + if scope.endPos.line >= line: + scopes.append(scope) + return scopes + def scopeAt(self, line: int, character: int) -> CallGraphScope: """ Gets the CallGraphScope from the specified 0-base line and character position From 77aed5e10e69993829ca7c5a3d8dec0e51c8fd81 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 3 Mar 2019 22:23:12 +0800 Subject: [PATCH 55/84] Ignore blank lines and space changes for diff. Explicitly restrict originCommit and terminalCommit to parse for Commit object. --- persper/analytics/analyzer2.py | 9 ++++----- persper/analytics/git_tools.py | 3 ++- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 13ab72f5f07..4db0b058e67 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -5,8 +5,7 @@ from git import Commit, Diff, DiffIndex, Repo -from persper.analytics.git_tools import (EMPTY_TREE_SHA, diff_with_commit, - get_contents) +from persper.analytics.git_tools import (diff_with_commit, get_contents) from persper.analytics.graph_server import CommitSeekingMode, GraphServer @@ -61,7 +60,7 @@ def originCommit(self): @originCommit.setter def originCommit(self, value: Union[Commit, str]): - self._originCommit = self._repo.rev_parse(value) if value else None + self._originCommit = self._repo.commit(value) if value else None @property def terminalCommit(self): @@ -72,7 +71,7 @@ def terminalCommit(self): @terminalCommit.setter def terminalCommit(self, value: Union[Commit, str]): - self._terminalCommit = self._repo.rev_parse(value) + self._terminalCommit = self._repo.commit(value) @property def firstParentOnly(self): @@ -98,7 +97,7 @@ def visitedCommits(self) -> Set[str]: return self._s_visitedCommits async def analyze(self, maxAnalyzedCommits=1000): - graphServerLastCommit = EMPTY_TREE_SHA + graphServerLastCommit:str = 
None commitSpec = self._terminalCommit if self._originCommit: commitSpec = self._originCommit.hexsha + ".." + self._terminalCommit.hexsha diff --git a/persper/analytics/git_tools.py b/persper/analytics/git_tools.py index 192c4345c56..93e1584a427 100644 --- a/persper/analytics/git_tools.py +++ b/persper/analytics/git_tools.py @@ -19,7 +19,8 @@ def diff_with_commit(repo: Repo, current_commit: Commit, base_commit_sha: str): base_commit = repo.tree(EMPTY_TREE_SHA) else: base_commit = repo.commit(base_commit_sha) - return base_commit.diff(current_commit, create_patch=True, indent_heuristic=True) + return base_commit.diff(current_commit, create_patch=True, indent_heuristic=True, + ignore_blank_lines=True, ignore_space_change=True) def initialize_repo(repo_path): From 645d8bf535cf3f95f09c3dbc77d7dc64631c2c14 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 3 Mar 2019 23:40:06 +0800 Subject: [PATCH 56/84] Add file names in graph nodes. --- persper/analytics/call_commit_graph.py | 4 +-- .../analytics/lsp_graph_server/__init__.py | 30 +++++++++++++------ 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 4f14eaa65b3..7adddc20d80 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -83,7 +83,7 @@ def _next_cindex(self): # TODO: remove the default value of files def add_node(self, node, files=[]): - self._digraph.add_node(node, size=None, history={}, files=files) + self._digraph.add_node(node, size=None, history={}, files=set(files)) # add_node must be called on source and target first def add_edge(self, source, target): @@ -110,7 +110,7 @@ def _get_node_history(self, node): return self._digraph.nodes[node]['history'] def update_node_files(self, node, new_files): - self._digraph.nodes[node]['files'] = new_files + self._digraph.nodes[node]['files'] = set(new_files) # TODO: provide other options for computing a node's size def 
_set_all_nodes_size(self, black_set=None): diff --git a/persper/analytics/lsp_graph_server/__init__.py b/persper/analytics/lsp_graph_server/__init__.py index fad937738d7..0545b419a66 100644 --- a/persper/analytics/lsp_graph_server/__init__.py +++ b/persper/analytics/lsp_graph_server/__init__.py @@ -67,6 +67,7 @@ def __init__(self, workspaceRoot: str, # [(oldPath, newPath, addedLines, removedLines), ...] # added/removedLines := [[startLine, modifiedLines], ...] self._stashedPatches: List[Tuple[PurePath, PurePath, List[Tuple[int, int]], List[Tuple[int, int]]]] = [] + self._symbolPaths = dict() self._commitSeekingMode: CommitSeekingMode = None def __getstate__(self): @@ -88,6 +89,7 @@ def start_commit(self, hexsha: str, seeking_mode: CommitSeekingMode, author_name self._commitSeekingMode = seeking_mode if seeking_mode != CommitSeekingMode.Rewind: self._ccgraph.add_commit(hexsha, author_name, author_email, commit_message) + self._symbolPaths.clear() async def update_graph(self, old_filename: str, old_src: str, new_filename: str, new_src: str, patch: bytes): oldPath = self._workspaceRoot.joinpath(old_filename).resolve() if old_filename else None @@ -114,7 +116,7 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, for i in range(start - 1, end): # print("Removed L", i + 1, list((s.name, s.startPos, s.endPos) for s in oldDoc.scopesOnLine(i))) for scope in oldDoc.scopesOnLine(i): - self._safeUpdateNodeHistory(scope.name, 0, 1) + self._safeUpdateNodeHistory(scope, 0, 1) self._stashedPatches.append((oldPath, newPath, added, None)) # perform file operations @@ -128,10 +130,10 @@ async def update_graph(self, old_filename: str, old_src: str, new_filename: str, self._invalidatedFiles.add(newPath) self._lastFileWrittenTime = datetime.now() - def _safeUpdateNodeHistory(self, name: str, addedLines: int, removedLines: int): - if name not in self._ccgraph.nodes(): - self._ccgraph.add_node(name) - self._ccgraph.update_node_history(name, addedLines, 
removedLines) + def _safeUpdateNodeHistory(self, scope: CallGraphScope, addedLines: int, removedLines: int): + if scope.name not in self._ccgraph.nodes(): + self._ccgraph.add_node(scope.name) + self._ccgraph.update_node_history(scope.name, addedLines, removedLines) def _markWholeDocumentAsChanged(self, doc: TokenizedDocument, markAsRemoved: bool): # markAsRemoved: True: document has been deleted @@ -142,7 +144,7 @@ def _markWholeDocumentAsChanged(self, doc: TokenizedDocument, markAsRemoved: boo while parentScopes and parentScopes[-1][0].endPos <= scope.startPos: # scope is out of parentScope, then the changed line count for parentScope is decided s, c = parentScopes.pop() - self._safeUpdateNodeHistory(s.name, c, 0) + self._safeUpdateNodeHistory(s, c, 0) thisScopeLines = scope.endPos.line - scope.startPos.line + 1 if parentScopes: # Subtract LOC from innermost scope to eliminate dups @@ -164,9 +166,9 @@ def _markWholeDocumentAsChanged(self, doc: TokenizedDocument, markAsRemoved: boo while parentScopes: s, c = parentScopes.pop() if markAsRemoved: - self._safeUpdateNodeHistory(s.name, 0, c) + self._safeUpdateNodeHistory(s, 0, c) else: - self._safeUpdateNodeHistory(s.name, c, 0) + self._safeUpdateNodeHistory(s, c, 0) async def end_commit(self, hexsha): # update vetices & edges @@ -190,7 +192,12 @@ async def end_commit(self, hexsha): for i in range(start - 1, end): # print("Added L", i + 1, list((s.name, s.startPos, s.endPos) for s in newDoc.scopesOnLine(i))) for scope in newDoc.scopesOnLine(i): - self._safeUpdateNodeHistory(scope.name, 1, 0) + self._safeUpdateNodeHistory(scope, 1, 0) + + # update node files + for nodeName, nodeFiles in self._symbolPaths.items(): + self._ccgraph.update_node_files(nodeName, [str(f.relative_to(self._workspaceRoot)).replace("\\", "/") for f in nodeFiles]) + self._stashedPatches.clear() # ensure the files in the next commit has a different timestamp from this commit. 
@@ -287,6 +294,11 @@ async def updateGraph(self): scope: CallGraphScope if scope.name not in self._ccgraph.nodes().data(): self._ccgraph.add_node(scope.name) + symbolPaths = self._symbolPaths.get(scope.name, None) + if not symbolPaths: + symbolPaths = set() + self._symbolPaths[scope.name] = symbolPaths + symbolPaths.add(scope.file) # update edges await self._callGraphManager.buildGraph(fileNames=affectedFiles) self._invalidatedFiles.clear() From bceae4dcf98fdae0837c5c9c83a95d89e8160ae9 Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 3 Mar 2019 23:40:37 +0800 Subject: [PATCH 57/84] Update c/cpp baseline. --- .../baseline/cpp_test_repo/A.g.json | 193 +++++++++++++----- .../baseline/cpp_test_repo/B.g.json | 190 ++++++++++++----- .../baseline/cpp_test_repo/C.g.json | 187 ++++++++++++----- .../baseline/cpp_test_repo/D.g.json | 187 ++++++++++++----- .../baseline/feature_branch/A.g.json | 8 +- .../baseline/feature_branch/B.g.json | 16 +- .../baseline/feature_branch/C.g.json | 31 +-- .../baseline/feature_branch/D.g.json | 39 ++-- .../baseline/feature_branch/E.g.json | 51 ++--- .../baseline/feature_branch/F.g.json | 59 +++--- .../baseline/feature_branch/G.g.json | 39 ++-- .../baseline/feature_branch/H.g.json | 51 ++--- .../baseline/feature_branch/I.g.json | 51 ++--- .../baseline/feature_branch/J.g.json | 59 +++--- .../baseline/feature_branch/K.g.json | 59 +++--- .../feature_branch_first_parent/A.g.json | 8 +- .../feature_branch_first_parent/B.g.json | 16 +- .../feature_branch_first_parent/C.g.json | 31 +-- .../feature_branch_first_parent/D.g.json | 35 ++-- .../feature_branch_first_parent/E.g.json | 43 ++-- .../feature_branch_first_parent/F.g.json | 51 +++-- .../feature_branch_first_parent/K.g.json | 51 +++-- 22 files changed, 971 insertions(+), 484 deletions(-) diff --git a/test/test_analytics/baseline/cpp_test_repo/A.g.json b/test/test_analytics/baseline/cpp_test_repo/A.g.json index e91a766ab1f..3a503bd6209 100644 --- a/test/test_analytics/baseline/cpp_test_repo/A.g.json 
+++ b/test/test_analytics/baseline/cpp_test_repo/A.g.json @@ -67,7 +67,9 @@ }, "nodes": { "ANSI_COLOR_BLUE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -77,7 +79,9 @@ "size": null }, "ANSI_COLOR_BRIGHT": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -87,7 +91,9 @@ "size": null }, "ANSI_COLOR_CYAN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -97,7 +103,9 @@ "size": null }, "ANSI_COLOR_GREEN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -107,7 +115,9 @@ "size": null }, "ANSI_COLOR_MAGENTA": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -117,7 +127,9 @@ "size": null }, "ANSI_COLOR_RED": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -127,7 +139,9 @@ "size": null }, "ANSI_COLOR_RESET": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -137,7 +151,9 @@ "size": null }, "ANSI_COLOR_YELLOW": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -147,7 +163,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 5, @@ -157,7 +175,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 4, @@ -167,7 +187,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 25, @@ -177,7 +200,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -187,7 +213,10 @@ "size": null }, "RowReader 
&operator>>(RowReader &reader, float &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -197,7 +226,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -207,7 +239,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -217,7 +252,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 17, @@ -227,7 +265,9 @@ "size": null }, "TDest safe_cast(TSrc obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -237,7 +277,9 @@ "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -247,7 +289,9 @@ "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -257,7 +301,9 @@ "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { - "files": [], + "files": [ + "TextFileParsers.h" + ], "history": { "A": { "adds": 1, @@ -267,7 +313,9 @@ "size": null }, "TStream OpenAndValidate(const TPath arg1)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -277,7 +325,9 @@ "size": null }, "_DECLARE_ENUM": { - "files": [], + "files": [ + "Utility.cpp" + ], "history": { "A": { "adds": 1, @@ -287,7 +337,9 @@ "size": null }, "_DECLARE_ENUM_DEFAULT": { - "files": [], + "files": [ + "Utility.cpp" + ], "history": { "A": { "adds": 1, @@ -297,7 +349,9 @@ "size": null }, "_RE_TRACE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -307,7 
+361,9 @@ "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -317,7 +373,9 @@ "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { - "files": [], + "files": [ + "stdafx.h" + ], "history": { "A": { "adds": 1, @@ -327,7 +385,9 @@ "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 16, @@ -337,7 +397,10 @@ "size": null }, "bool Confirm(const std::string &prompt)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 18, @@ -347,7 +410,10 @@ "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 30, @@ -357,7 +423,9 @@ "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -367,7 +435,9 @@ "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -377,7 +447,9 @@ "size": null }, "const char *FriendlyNameOf()": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -387,7 +459,9 @@ "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -397,7 +471,9 @@ "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -407,7 +483,9 @@ "size": null }, "const char *FriendlyNameOf(const type_info &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -417,7 +495,9 @@ "size": null }, "double 
ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -427,7 +507,9 @@ "size": null }, "enum class StringComparison : int {}": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 6, @@ -437,7 +519,9 @@ "size": null }, "inline std::string to_string(const std::pair &value)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 4, @@ -447,7 +531,9 @@ "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -457,7 +543,9 @@ "size": null }, "int main(int argc, char *argv[])": { - "files": [], + "files": [ + "main.cpp" + ], "history": { "A": { "adds": 7, @@ -467,7 +555,9 @@ "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -477,7 +567,9 @@ "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 6, @@ -487,7 +579,9 @@ "size": null }, "std::string StreamStatusToString(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -497,7 +591,9 @@ "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 23, @@ -507,7 +603,10 @@ "size": null }, "void ReportException(const std::exception &ex, int level)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 21, @@ -517,7 +616,9 @@ "size": null }, "void ValidateStream(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 
1, diff --git a/test/test_analytics/baseline/cpp_test_repo/B.g.json b/test/test_analytics/baseline/cpp_test_repo/B.g.json index eb9000faf62..aebb4ca25f3 100644 --- a/test/test_analytics/baseline/cpp_test_repo/B.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/B.g.json @@ -79,7 +79,9 @@ }, "nodes": { "ANSI_COLOR_BLUE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -89,7 +91,9 @@ "size": null }, "ANSI_COLOR_BRIGHT": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -99,7 +103,9 @@ "size": null }, "ANSI_COLOR_CYAN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -109,7 +115,9 @@ "size": null }, "ANSI_COLOR_GREEN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -119,7 +127,9 @@ "size": null }, "ANSI_COLOR_MAGENTA": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -129,7 +139,9 @@ "size": null }, "ANSI_COLOR_RED": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -139,7 +151,9 @@ "size": null }, "ANSI_COLOR_RESET": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -149,7 +163,9 @@ "size": null }, "ANSI_COLOR_YELLOW": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -159,7 +175,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 5, @@ -169,7 +187,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 4, @@ -179,7 +199,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 25, @@ -189,7 +212,10 @@ "size": null }, 
"RowReader &operator>>(RowReader &reader, double &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -199,7 +225,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -209,7 +238,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -219,7 +251,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 7, @@ -229,7 +264,10 @@ "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp", + "TextFileParsers.h" + ], "history": { "A": { "adds": 17, @@ -239,7 +277,9 @@ "size": null }, "TDest safe_cast(TSrc obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -249,7 +289,9 @@ "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -259,7 +301,9 @@ "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -269,7 +313,9 @@ "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { - "files": [], + "files": [ + "TextFileParsers.h" + ], "history": { "A": { "adds": 1, @@ -279,7 +325,9 @@ "size": null }, "TStream OpenAndValidate(const TPath arg1)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -289,7 +337,9 @@ "size": null }, "_DECLARE_ENUM": { - "files": [], + "files": [ + "Utility-1.cpp" + ], "history": { "A": { "adds": 1, @@ -299,7 +349,9 @@ "size": null }, "_DECLARE_ENUM_DEFAULT": { - 
"files": [], + "files": [ + "Utility-1.cpp" + ], "history": { "A": { "adds": 1, @@ -309,7 +361,9 @@ "size": null }, "_RE_TRACE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -319,7 +373,9 @@ "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -329,7 +385,9 @@ "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { - "files": [], + "files": [ + "stdafx.h" + ], "history": { "A": { "adds": 1, @@ -339,7 +397,9 @@ "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 16, @@ -349,7 +409,9 @@ "size": null }, "bool Confirm(const std::string &prompt)": { - "files": [], + "files": [ + "Utility-1.cpp" + ], "history": { "A": { "adds": 18, @@ -359,7 +421,9 @@ "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "files": [], + "files": [ + "Utility-1.cpp" + ], "history": { "A": { "adds": 30, @@ -369,7 +433,9 @@ "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -379,7 +445,9 @@ "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -389,7 +457,9 @@ "size": null }, "const char *FriendlyNameOf()": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -399,7 +469,9 @@ "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -409,7 +481,9 @@ "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -419,7 +493,9 @@ 
"size": null }, "const char *FriendlyNameOf(const type_info &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -429,7 +505,9 @@ "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -439,7 +517,9 @@ "size": null }, "enum class StringComparison : int {}": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 6, @@ -449,7 +529,9 @@ "size": null }, "inline std::string to_string(const std::pair &value)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 4, @@ -459,7 +541,9 @@ "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -469,7 +553,9 @@ "size": null }, "int main(int argc, char *argv[])": { - "files": [], + "files": [ + "main.cpp" + ], "history": { "A": { "adds": 7, @@ -483,7 +569,9 @@ "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -493,7 +581,9 @@ "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 6, @@ -503,7 +593,9 @@ "size": null }, "std::string StreamStatusToString(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -513,7 +605,9 @@ "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 23, @@ -523,7 +617,9 @@ "size": null }, "void ReportException(const std::exception &ex, int level)": { - "files": [], + "files": [ + "Utility-1.cpp" + ], "history": { 
"A": { "adds": 21, @@ -533,7 +629,9 @@ "size": null }, "void ValidateStream(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, diff --git a/test/test_analytics/baseline/cpp_test_repo/C.g.json b/test/test_analytics/baseline/cpp_test_repo/C.g.json index 092ff75892a..2e596bc9ef6 100644 --- a/test/test_analytics/baseline/cpp_test_repo/C.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/C.g.json @@ -79,7 +79,9 @@ }, "nodes": { "ANSI_COLOR_BLUE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -89,7 +91,9 @@ "size": null }, "ANSI_COLOR_BRIGHT": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -99,7 +103,9 @@ "size": null }, "ANSI_COLOR_CYAN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -109,7 +115,9 @@ "size": null }, "ANSI_COLOR_GREEN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -119,7 +127,9 @@ "size": null }, "ANSI_COLOR_MAGENTA": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -129,7 +139,9 @@ "size": null }, "ANSI_COLOR_RED": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -139,7 +151,9 @@ "size": null }, "ANSI_COLOR_RESET": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -149,7 +163,9 @@ "size": null }, "ANSI_COLOR_YELLOW": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -159,7 +175,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 5, @@ -169,7 +187,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 4, @@ -179,7 +199,9 @@ "size": null }, "RowReader 
&operator>>(RowReader &reader, bool &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 25, @@ -193,7 +215,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -203,7 +227,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -213,7 +239,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -223,7 +251,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -233,7 +263,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 17, @@ -243,7 +275,9 @@ "size": null }, "TDest safe_cast(TSrc obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -253,7 +287,9 @@ "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -263,7 +299,9 @@ "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -273,7 +311,9 @@ "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { - "files": [], + "files": [ + "TextFileParsers.h" + ], "history": { "A": { "adds": 1, @@ -283,7 +323,9 @@ "size": null }, "TStream OpenAndValidate(const TPath arg1)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -293,7 +335,9 @@ "size": null }, "_DECLARE_ENUM": { - "files": [], + "files": [ + "Utility.cpp" + ], "history": { "A": { "adds": 1, @@ -303,7 +347,9 
@@ "size": null }, "_DECLARE_ENUM_DEFAULT": { - "files": [], + "files": [ + "Utility.cpp" + ], "history": { "A": { "adds": 1, @@ -313,7 +359,9 @@ "size": null }, "_RE_TRACE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -323,7 +371,9 @@ "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -333,7 +383,9 @@ "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { - "files": [], + "files": [ + "stdafx.h" + ], "history": { "A": { "adds": 1, @@ -343,7 +395,9 @@ "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 16, @@ -357,7 +411,10 @@ "size": null }, "bool Confirm(const std::string &prompt)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 18, @@ -367,7 +424,10 @@ "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 30, @@ -381,7 +441,9 @@ "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -391,7 +453,9 @@ "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -401,7 +465,9 @@ "size": null }, "const char *FriendlyNameOf()": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -411,7 +477,9 @@ "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -421,7 +489,9 @@ "size": null }, "const char *FriendlyNameOf(const std::type_index &type)": { - "files": [], + "files": 
[ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -431,7 +501,9 @@ "size": null }, "const char *FriendlyNameOf(const type_info &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -441,7 +513,9 @@ "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -451,7 +525,9 @@ "size": null }, "enum class StringComparison : int {}": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 6, @@ -465,7 +541,9 @@ "size": null }, "inline std::string to_string(const std::pair &value)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 4, @@ -475,7 +553,9 @@ "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -485,7 +565,9 @@ "size": null }, "int main(int argc, char *argv[])": { - "files": [], + "files": [ + "main.cpp" + ], "history": { "A": { "adds": 7, @@ -503,7 +585,9 @@ "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -513,7 +597,9 @@ "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 6, @@ -523,7 +609,9 @@ "size": null }, "std::string StreamStatusToString(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -533,7 +621,9 @@ "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 23, @@ -543,7 +633,10 @@ "size": null }, "void ReportException(const std::exception &ex, int 
level)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 21, @@ -553,7 +646,9 @@ "size": null }, "void ValidateStream(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, diff --git a/test/test_analytics/baseline/cpp_test_repo/D.g.json b/test/test_analytics/baseline/cpp_test_repo/D.g.json index 283418a6ce5..02fe0fde492 100644 --- a/test/test_analytics/baseline/cpp_test_repo/D.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/D.g.json @@ -79,7 +79,9 @@ }, "nodes": { "ANSI_COLOR_BLUE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -89,7 +91,9 @@ "size": null }, "ANSI_COLOR_BRIGHT": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -99,7 +103,9 @@ "size": null }, "ANSI_COLOR_CYAN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -109,7 +115,9 @@ "size": null }, "ANSI_COLOR_GREEN": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -119,7 +127,9 @@ "size": null }, "ANSI_COLOR_MAGENTA": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -129,7 +139,9 @@ "size": null }, "ANSI_COLOR_RED": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -139,7 +151,9 @@ "size": null }, "ANSI_COLOR_RESET": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -149,7 +163,9 @@ "size": null }, "ANSI_COLOR_YELLOW": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -159,7 +175,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 5, @@ -169,7 +187,9 @@ "size": null }, "ConfigurationParser::ConfigurationParser(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], 
"history": { "A": { "adds": 4, @@ -179,7 +199,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 25, @@ -197,7 +219,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, double &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -207,7 +231,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, float &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -217,7 +243,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, int &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -227,7 +255,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, long &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 7, @@ -237,7 +267,9 @@ "size": null }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 17, @@ -247,7 +279,9 @@ "size": null }, "TDest safe_cast(TSrc obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -257,7 +291,9 @@ "size": null }, "TEnum operator&(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -267,7 +303,9 @@ "size": null }, "TEnum operator|(TEnum lhs, TEnum rhs)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -277,7 +315,9 @@ "size": null }, "TStream &operator>>(TStream &s, RowReader &reader)": { - "files": [], + "files": [ + "TextFileParsers.h" + ], "history": { "A": { "adds": 1, @@ -287,7 +327,9 @@ "size": null }, "TStream OpenAndValidate(const TPath arg1)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -297,7 +339,9 @@ "size": null }, "_DECLARE_ENUM": { - "files": [], + 
"files": [ + "Utility.cpp" + ], "history": { "A": { "adds": 1, @@ -307,7 +351,9 @@ "size": null }, "_DECLARE_ENUM_DEFAULT": { - "files": [], + "files": [ + "Utility.cpp" + ], "history": { "A": { "adds": 1, @@ -317,7 +363,9 @@ "size": null }, "_RE_TRACE": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -327,7 +375,9 @@ "size": null }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -337,7 +387,9 @@ "size": null }, "_SILENCE_STDEXT_ALLOCATORS_DEPRECATION_WARNING": { - "files": [], + "files": [ + "stdafx.h" + ], "history": { "A": { "adds": 1, @@ -347,7 +399,9 @@ "size": null }, "bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 16, @@ -365,7 +419,10 @@ "size": null }, "bool Confirm(const std::string &prompt)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 18, @@ -375,7 +432,10 @@ "size": null }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 30, @@ -393,7 +453,9 @@ "size": null }, "bool dynamic_kind_of(const TSrc *obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -403,7 +465,9 @@ "size": null }, "bool pointer_kind_of(const std::shared_ptr obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -413,7 +477,9 @@ "size": null }, "const char *FriendlyNameOf()": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -423,7 +489,9 @@ "size": null }, "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -433,7 +501,9 @@ "size": null }, "const 
char *FriendlyNameOf(const std::type_index &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -443,7 +513,9 @@ "size": null }, "const char *FriendlyNameOf(const type_info &type)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -453,7 +525,9 @@ "size": null }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -463,7 +537,9 @@ "size": null }, "enum class StringComparison : int {}": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 6, @@ -481,7 +557,9 @@ "size": null }, "inline std::string to_string(const std::pair &value)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 4, @@ -491,7 +569,9 @@ "size": null }, "int ConfigurationParser::GetInt(const std::string &key, int defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 12, @@ -501,7 +581,9 @@ "size": null }, "int main(int argc, char *argv[])": { - "files": [], + "files": [ + "main.cpp" + ], "history": { "A": { "adds": 7, @@ -523,7 +605,9 @@ "size": null }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -533,7 +617,9 @@ "size": null }, "std::string ConfigurationParser::GetString(const std::string &key, const std::string &defaultValue) const": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 6, @@ -543,7 +629,9 @@ "size": null }, "std::string StreamStatusToString(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, @@ -553,7 +641,9 @@ "size": null }, "void ConfigurationParser::Load(std::istream &inputStream)": { - "files": [], + "files": [ + "TextFileParsers.cpp" + ], "history": { "A": { "adds": 23, @@ -563,7 
+653,10 @@ "size": null }, "void ReportException(const std::exception &ex, int level)": { - "files": [], + "files": [ + "Utility.cpp", + "Utility.h" + ], "history": { "A": { "adds": 21, @@ -573,7 +666,9 @@ "size": null }, "void ValidateStream(const TStream &stream)": { - "files": [], + "files": [ + "Utility.h" + ], "history": { "A": { "adds": 1, diff --git a/test/test_analytics/baseline/feature_branch/A.g.json b/test/test_analytics/baseline/feature_branch/A.g.json index ddabd6b9c37..fd012a79514 100644 --- a/test/test_analytics/baseline/feature_branch/A.g.json +++ b/test/test_analytics/baseline/feature_branch/A.g.json @@ -2,7 +2,9 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -12,7 +14,9 @@ "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch/B.g.json b/test/test_analytics/baseline/feature_branch/B.g.json index 91024320909..a186e6d2226 100644 --- a/test/test_analytics/baseline/feature_branch/B.g.json +++ b/test/test_analytics/baseline/feature_branch/B.g.json @@ -2,7 +2,9 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -16,7 +18,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -26,7 +30,9 @@ "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, @@ -36,7 +42,9 @@ "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch/C.g.json b/test/test_analytics/baseline/feature_branch/C.g.json index 75e33885dc3..4b791785f4e 100644 --- 
a/test/test_analytics/baseline/feature_branch/C.g.json +++ b/test/test_analytics/baseline/feature_branch/C.g.json @@ -11,7 +11,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -25,7 +27,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -39,31 +43,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch/D.g.json b/test/test_analytics/baseline/feature_branch/D.g.json index 3ab44c0690f..4b931b0a2c0 100644 --- a/test/test_analytics/baseline/feature_branch/D.g.json +++ b/test/test_analytics/baseline/feature_branch/D.g.json @@ -23,7 +23,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -37,7 +39,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -51,7 +55,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -61,35 +67,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": 
{ "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch/E.g.json b/test/test_analytics/baseline/feature_branch/E.g.json index a3d682842c0..414ed9fd459 100644 --- a/test/test_analytics/baseline/feature_branch/E.g.json +++ b/test/test_analytics/baseline/feature_branch/E.g.json @@ -31,7 +31,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -45,7 +47,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -59,7 +63,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -69,35 +75,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -107,7 +106,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 16, @@ -121,7 +122,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 12, @@ -135,7 +138,9 @@ "size": null }, "void insert(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 25, diff --git 
a/test/test_analytics/baseline/feature_branch/F.g.json b/test/test_analytics/baseline/feature_branch/F.g.json index e2f6a4e788f..31a14cda404 100644 --- a/test/test_analytics/baseline/feature_branch/F.g.json +++ b/test/test_analytics/baseline/feature_branch/F.g.json @@ -39,7 +39,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -53,7 +55,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -67,7 +71,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -77,7 +83,9 @@ "size": null }, "int count()": { - "files": [], + "files": [ + "feature-J.c" + ], "history": { "J": { "adds": 12, @@ -87,35 +95,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -125,7 +126,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 16, @@ -139,7 +142,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 12, @@ -153,7 +158,9 @@ "size": null }, "void display(struct node *r)": { - "files": [], + "files": [ + "feature-J.c" + ], "history": { "J": { "adds": 14, @@ -163,7 +170,9 @@ "size": null }, "void insert(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { 
"adds": 25, diff --git a/test/test_analytics/baseline/feature_branch/G.g.json b/test/test_analytics/baseline/feature_branch/G.g.json index 4988009f0d2..9be01d4cc7f 100644 --- a/test/test_analytics/baseline/feature_branch/G.g.json +++ b/test/test_analytics/baseline/feature_branch/G.g.json @@ -23,7 +23,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -37,7 +39,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -51,7 +55,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -61,35 +67,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch/H.g.json b/test/test_analytics/baseline/feature_branch/H.g.json index 2034c2f568b..8089ef9ccbf 100644 --- a/test/test_analytics/baseline/feature_branch/H.g.json +++ b/test/test_analytics/baseline/feature_branch/H.g.json @@ -31,7 +31,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -45,7 +47,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -59,7 +63,9 @@ "size": null }, "char *str_replace(char *search, char *replace, 
char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -69,35 +75,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -107,7 +106,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 16, @@ -117,7 +118,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 12, @@ -127,7 +130,9 @@ "size": null }, "void insert(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 25, diff --git a/test/test_analytics/baseline/feature_branch/I.g.json b/test/test_analytics/baseline/feature_branch/I.g.json index a3d682842c0..414ed9fd459 100644 --- a/test/test_analytics/baseline/feature_branch/I.g.json +++ b/test/test_analytics/baseline/feature_branch/I.g.json @@ -31,7 +31,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -45,7 +47,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -59,7 +63,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -69,35 +75,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + 
"history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -107,7 +106,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 16, @@ -121,7 +122,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 12, @@ -135,7 +138,9 @@ "size": null }, "void insert(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 25, diff --git a/test/test_analytics/baseline/feature_branch/J.g.json b/test/test_analytics/baseline/feature_branch/J.g.json index a1b20e3761c..7606ab45a52 100644 --- a/test/test_analytics/baseline/feature_branch/J.g.json +++ b/test/test_analytics/baseline/feature_branch/J.g.json @@ -31,7 +31,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -45,7 +47,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -59,7 +63,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -69,7 +75,9 @@ "size": null }, "int count()": { - "files": [], + "files": [ + "feature-J.c" + ], "history": { "J": { "adds": 12, @@ -79,35 +87,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], 
"history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -117,7 +118,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 16, @@ -131,7 +134,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 12, @@ -145,7 +150,9 @@ "size": null }, "void display(struct node *r)": { - "files": [], + "files": [ + "feature-J.c" + ], "history": { "J": { "adds": 14, @@ -155,7 +162,9 @@ "size": null }, "void insert(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 25, diff --git a/test/test_analytics/baseline/feature_branch/K.g.json b/test/test_analytics/baseline/feature_branch/K.g.json index a863b368ffd..555bdff3d1e 100644 --- a/test/test_analytics/baseline/feature_branch/K.g.json +++ b/test/test_analytics/baseline/feature_branch/K.g.json @@ -39,7 +39,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -53,7 +55,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -67,7 +71,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "G": { "adds": 26, @@ -77,7 +83,9 @@ "size": null }, "int count()": { - "files": [], + "files": [ + "feature-K.c" + ], "history": { "J": { "adds": 12, @@ -87,35 +95,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + 
"files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 - }, - "G": { - "adds": 1, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -125,7 +126,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 16, @@ -139,7 +142,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 12, @@ -153,7 +158,9 @@ "size": null }, "void display(struct node *r)": { - "files": [], + "files": [ + "feature-K.c" + ], "history": { "J": { "adds": 14, @@ -167,7 +174,9 @@ "size": null }, "void insert(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "H": { "adds": 25, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/A.g.json b/test/test_analytics/baseline/feature_branch_first_parent/A.g.json index ddabd6b9c37..fd012a79514 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/A.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/A.g.json @@ -2,7 +2,9 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -12,7 +14,9 @@ "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/B.g.json b/test/test_analytics/baseline/feature_branch_first_parent/B.g.json index 91024320909..a186e6d2226 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/B.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/B.g.json @@ -2,7 +2,9 @@ "edges": {}, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -16,7 +18,9 @@ 
"size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -26,7 +30,9 @@ "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, @@ -36,7 +42,9 @@ "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json index 75e33885dc3..4b791785f4e 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json @@ -11,7 +11,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -25,7 +27,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -39,31 +43,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json index 1835ec023d1..f289e724860 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json @@ -23,7 +23,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - 
"files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -37,7 +39,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -51,7 +55,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "D": { "adds": 26, @@ -61,31 +67,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json index 5d0ebdb01d6..8d6ec786302 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json @@ -23,7 +23,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -37,7 +39,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -51,7 +55,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "D": { "adds": 26, @@ -61,31 +67,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + 
"files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -95,7 +98,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "E": { "adds": 11, @@ -105,7 +110,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "E": { "adds": 29, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json index 18cafb83055..da60e2d5a1b 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json @@ -23,7 +23,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -37,7 +39,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -51,7 +55,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "D": { "adds": 26, @@ -61,7 +67,9 @@ "size": null }, "int count()": { - "files": [], + "files": [ + "feature-J.c" + ], "history": { "F": { "adds": 12, @@ -71,31 +79,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -105,7 +110,9 @@ "size": 
null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "E": { "adds": 11, @@ -115,7 +122,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "E": { "adds": 29, @@ -125,7 +134,9 @@ "size": null }, "void display(struct node *r)": { - "files": [], + "files": [ + "feature-J.c" + ], "history": { "F": { "adds": 14, diff --git a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json index c47b146aec0..1c8c8e2e3a9 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json @@ -23,7 +23,9 @@ }, "nodes": { "char *str_append(char *string, char *append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 7, @@ -37,7 +39,9 @@ "size": null }, "char *str_append_chr(char *string, char append)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 3, @@ -51,7 +55,9 @@ "size": null }, "char *str_replace(char *search, char *replace, char *subject)": { - "files": [], + "files": [ + "feature-G.c" + ], "history": { "D": { "adds": 26, @@ -61,7 +67,9 @@ "size": null }, "int count()": { - "files": [], + "files": [ + "feature-K.c" + ], "history": { "F": { "adds": 12, @@ -71,31 +79,28 @@ "size": null }, "int str_equals()": { - "files": [], - "history": { - "C": { - "adds": 1, - "dels": 0 - } - }, + "files": [ + "main.c" + ], + "history": {}, "size": null }, "int str_equals(char *equal1, char *eqaul2)": { - "files": [], + "files": [ + "main.c" + ], "history": { "B": { "adds": 11, "dels": 0 - }, - "C": { - "adds": 0, - "dels": 1 } }, "size": null }, "int str_len(char *string)": { - "files": [], + "files": [ + "main.c" + ], "history": { "A": { "adds": 6, @@ -105,7 +110,9 @@ "size": null }, "void add(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "E": { "adds": 
11, @@ -115,7 +122,9 @@ "size": null }, "void append(int num)": { - "files": [], + "files": [ + "feature-H.c" + ], "history": { "E": { "adds": 29, @@ -125,7 +134,9 @@ "size": null }, "void display(struct node *r)": { - "files": [], + "files": [ + "feature-K.c" + ], "history": { "F": { "adds": 14, From b8f69ad77b37bf389c5a704b7d8e7b404d855594 Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 4 Mar 2019 00:46:22 +0800 Subject: [PATCH 58/84] Update graph_baseline for files attribute. --- test/test_analytics/utility/graph_baseline.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/test/test_analytics/utility/graph_baseline.py b/test/test_analytics/utility/graph_baseline.py index 4cb5b7b8191..8323f5455a5 100644 --- a/test/test_analytics/utility/graph_baseline.py +++ b/test/test_analytics/utility/graph_baseline.py @@ -24,8 +24,14 @@ def formatEdgeId(u: str, v: str): def graphToDict(ccg: CallCommitGraph): + nodes = ccg.nodes(data=True) + for name, attr in nodes: + if "files" in attr: + files = list(attr["files"]) + files.sort() + attr["files"] = files result = { - "nodes": dict(ccg.nodes(data=True)), + "nodes": dict(nodes), "edges": dict(((formatEdgeId(u, v), data) for (u, v, data) in ccg.edges(data=True))) } return result @@ -34,8 +40,8 @@ def graphToDict(ccg: CallCommitGraph): def fixGraphDict(graphData: dict): if "nodes" in graphData: for id, attr in graphData["nodes"].items(): - if "history" in attr: - attr["history"] = dict(attr["history"].items()) + if "files" in attr: + attr["files"] = set(attr["files"]) return graphData From c19bdf5efad42903569e0cc6e98b23bfa4a40a7d Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sun, 3 Mar 2019 22:19:10 -0800 Subject: [PATCH 59/84] Fix error in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9e9c9b6caad..9664c714be2 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ git --version Add the following line to your `~/.bashrc` file. 
``` -export PATH=$PATH:/path/to/dir +export PYTHONPATH=$PYTHONPATH:/path/to/dir ``` To update your path for the remainder of the session. From 2b12d630bfa1052c9d2ea5573883cc9130b08a18 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 10:33:09 -0800 Subject: [PATCH 60/84] Remove deprecated tests --- test/test_processor/A/A | 1 - test/test_processor/B/B | 1 - test/test_processor/C/C | 1 - test/test_processor/D/D | 1 - test/test_processor/E/E | 1 - test/test_processor/F/F | 1 - test/test_processor/G/G | 1 - test/test_processor/H/H | 1 - test/test_processor/I/I | 1 - test/test_processor/J/J | 1 - test/test_processor/K/K | 1 - test/test_processor/L/L | 1 - test/test_processor/cg.dot | 6 ------ 13 files changed, 18 deletions(-) delete mode 100644 test/test_processor/A/A delete mode 100644 test/test_processor/B/B delete mode 100644 test/test_processor/C/C delete mode 100644 test/test_processor/D/D delete mode 100644 test/test_processor/E/E delete mode 100644 test/test_processor/F/F delete mode 100644 test/test_processor/G/G delete mode 100644 test/test_processor/H/H delete mode 100644 test/test_processor/I/I delete mode 100644 test/test_processor/J/J delete mode 100644 test/test_processor/K/K delete mode 100644 test/test_processor/L/L delete mode 100644 test/test_processor/cg.dot diff --git a/test/test_processor/A/A b/test/test_processor/A/A deleted file mode 100644 index f70f10e4db1..00000000000 --- a/test/test_processor/A/A +++ /dev/null @@ -1 +0,0 @@ -A diff --git a/test/test_processor/B/B b/test/test_processor/B/B deleted file mode 100644 index 223b7836fb1..00000000000 --- a/test/test_processor/B/B +++ /dev/null @@ -1 +0,0 @@ -B diff --git a/test/test_processor/C/C b/test/test_processor/C/C deleted file mode 100644 index 3cc58df8375..00000000000 --- a/test/test_processor/C/C +++ /dev/null @@ -1 +0,0 @@ -C diff --git a/test/test_processor/D/D b/test/test_processor/D/D deleted file mode 100644 index 17848105018..00000000000 --- a/test/test_processor/D/D 
+++ /dev/null @@ -1 +0,0 @@ -D diff --git a/test/test_processor/E/E b/test/test_processor/E/E deleted file mode 100644 index 1c507261389..00000000000 --- a/test/test_processor/E/E +++ /dev/null @@ -1 +0,0 @@ -E diff --git a/test/test_processor/F/F b/test/test_processor/F/F deleted file mode 100644 index cf84443e49e..00000000000 --- a/test/test_processor/F/F +++ /dev/null @@ -1 +0,0 @@ -F diff --git a/test/test_processor/G/G b/test/test_processor/G/G deleted file mode 100644 index fd792352985..00000000000 --- a/test/test_processor/G/G +++ /dev/null @@ -1 +0,0 @@ -G diff --git a/test/test_processor/H/H b/test/test_processor/H/H deleted file mode 100644 index a9edc74f384..00000000000 --- a/test/test_processor/H/H +++ /dev/null @@ -1 +0,0 @@ -H diff --git a/test/test_processor/I/I b/test/test_processor/I/I deleted file mode 100644 index db1a5a09f7d..00000000000 --- a/test/test_processor/I/I +++ /dev/null @@ -1 +0,0 @@ -I diff --git a/test/test_processor/J/J b/test/test_processor/J/J deleted file mode 100644 index c8f145fff1f..00000000000 --- a/test/test_processor/J/J +++ /dev/null @@ -1 +0,0 @@ -J diff --git a/test/test_processor/K/K b/test/test_processor/K/K deleted file mode 100644 index 7692e7d41c0..00000000000 --- a/test/test_processor/K/K +++ /dev/null @@ -1 +0,0 @@ -K diff --git a/test/test_processor/L/L b/test/test_processor/L/L deleted file mode 100644 index 05bef1a55a5..00000000000 --- a/test/test_processor/L/L +++ /dev/null @@ -1 +0,0 @@ -L diff --git a/test/test_processor/cg.dot b/test/test_processor/cg.dot deleted file mode 100644 index 693bc891aff..00000000000 --- a/test/test_processor/cg.dot +++ /dev/null @@ -1,6 +0,0 @@ -digraph test_processor { - A -> B -> C -> D; - B -> E -> F -> G -> D; - A -> H -> I -> J -> G ; - A -> K -> L -> J ; -} \ No newline at end of file From 7e0bd3d62bd332948c150eede783b24da641b048 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 11:11:07 -0800 Subject: [PATCH 61/84] Fix import error --- 
persper/analytics/call_graph/c.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/persper/analytics/call_graph/c.py b/persper/analytics/call_graph/c.py index ab4499ef13e..ca413e3deda 100644 --- a/persper/analytics/call_graph/c.py +++ b/persper/analytics/call_graph/c.py @@ -1,5 +1,5 @@ import networkx as nx -from persper.graphs.call_graph.utils import remove_edges_of_node, ns, line_attr +from persper.analytics.call_graph.utils import remove_edges_of_node, ns, line_attr class NotFunctionCallError(Exception): From 6d0da87b3c451c2cce7b04f9eb055e9c63a9aa3c Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 11:45:26 -0800 Subject: [PATCH 62/84] Change files from set back to list --- persper/analytics/call_commit_graph.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 7adddc20d80..4f14eaa65b3 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -83,7 +83,7 @@ def _next_cindex(self): # TODO: remove the default value of files def add_node(self, node, files=[]): - self._digraph.add_node(node, size=None, history={}, files=set(files)) + self._digraph.add_node(node, size=None, history={}, files=files) # add_node must be called on source and target first def add_edge(self, source, target): @@ -110,7 +110,7 @@ def _get_node_history(self, node): return self._digraph.nodes[node]['history'] def update_node_files(self, node, new_files): - self._digraph.nodes[node]['files'] = set(new_files) + self._digraph.nodes[node]['files'] = new_files # TODO: provide other options for computing a node's size def _set_all_nodes_size(self, black_set=None): From 54f342b07c66ef8cb04c937549f3de5428922a91 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 11:46:44 -0800 Subject: [PATCH 63/84] Pass --filename to srcml to set root.attrib['filename'] correctly --- persper/analytics/c.py | 6 +++--- 
persper/analytics/srcml.py | 27 ++++++++++++++++----------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/persper/analytics/c.py b/persper/analytics/c.py index e3d26b5f0ec..e6897bafa78 100644 --- a/persper/analytics/c.py +++ b/persper/analytics/c.py @@ -1,6 +1,6 @@ import re from persper.analytics.inverse_diff import inverse_diff -from persper.analytics.srcml import transform_src_to_tree +from persper.analytics.srcml import src_to_tree from persper.analytics.call_graph.c import update_graph, get_func_ranges_c from persper.analytics.detect_change import get_changed_functions from persper.analytics.patch_parser import PatchParser @@ -55,12 +55,12 @@ def update_graph(self, old_filename, old_src, new_filename, new_src, patch): # Parse source codes into ASTs if old_src: - old_ast = transform_src_to_tree(old_src) + old_ast = src_to_tree(old_filename, old_src) if old_ast is None: return -1 if new_src: - new_ast = transform_src_to_tree(new_src) + new_ast = src_to_tree(new_filename, new_src) if new_ast is None: return -1 ast_list = [new_ast] diff --git a/persper/analytics/srcml.py b/persper/analytics/srcml.py index 7b90d9090c5..f64cce5c95c 100755 --- a/persper/analytics/srcml.py +++ b/persper/analytics/srcml.py @@ -50,33 +50,38 @@ def transform_dir(input_dir, output_dir, extensions=('.c', '.h')): print("Tranformation completed, {} processed.".format(counter)) -def transform_src_to_tree(source_code, ext='.c'): - root = None +def src_to_tree(filename, src): + """ + Assume src is UTF-8 encoded. 
+ the temp file needs to have the right ext so that srcml can open it + """ + _, ext = os.path.splitext(filename) + if ext == '': + print("ERROR: src_to_tree can't extract file extension.") + return None + try: - f = tempfile.NamedTemporaryFile(mode='wb+', delete=False) - f.write(source_code.encode('utf-8', 'replace')) + f = tempfile.NamedTemporaryFile(mode='wb+', suffix=ext, delete=False) + f.write(src.encode('utf-8', 'replace')) f.close() except UnicodeEncodeError as e: - print("UnicodeEncodeError in transform_src_to_tree!") + print("ERROR: src_to_tree encounters UnicodeEncodeError.") if not f.closed: f.close() os.remove(f.name) return None - # rename so that srcml can open it - new_fname = f.name + ext - os.rename(f.name, new_fname) xml_path = f.name + ".xml" - cmd = 'srcml {} --position -o {}'.format(new_fname, xml_path) + cmd = 'srcml {} --position --filename {} -o {}'.format(f.name, '\"/' + filename + '\"', xml_path) subprocess.call(cmd, shell=True) try: root = etree.parse(xml_path).getroot() except: - print("Unable to parse xml file!") + print("ERROR: src_to_tree unable to parse xml file.") finally: if not f.closed: f.close() - os.remove(new_fname) + os.remove(f.name) if os.path.exists(xml_path): os.remove(xml_path) From e24cd5c165d67d516f6b70b9d6873b79ae699700 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 11:47:19 -0800 Subject: [PATCH 64/84] Add tests for src_to_tree --- test/test_analytics/test_srcml.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 test/test_analytics/test_srcml.py diff --git a/test/test_analytics/test_srcml.py b/test/test_analytics/test_srcml.py new file mode 100644 index 00000000000..ac12016d9c7 --- /dev/null +++ b/test/test_analytics/test_srcml.py @@ -0,0 +1,12 @@ +import os +from persper.analytics.srcml import src_to_tree +from persper.util.path import root_path + + +def test_src_to_tree(): + filename = 'example.cc' + full_path = os.path.join(root_path, 'test/test_analytics', filename) + with 
open(full_path, 'r') as f: + src = f.read() + root = src_to_tree(filename, src) + assert(root.attrib['filename'] == filename) From 361377c70902283d16d834b288d02a3ff53e29e2 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 12:48:23 -0800 Subject: [PATCH 65/84] Get started with documentation with sphinx --- Pipfile | 1 + Pipfile.lock | 151 +++++++++++++++++---- docs/Makefile | 19 +++ docs/conf.py | 179 +++++++++++++++++++++++++ docs/index.rst | 18 +++ docs/make.bat | 35 +++++ persper/analytics/call_commit_graph.py | 18 ++- 7 files changed, 394 insertions(+), 27 deletions(-) create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/make.bat diff --git a/Pipfile b/Pipfile index f394eb45afc..fa79d4c2768 100644 --- a/Pipfile +++ b/Pipfile @@ -22,6 +22,7 @@ python-jsonrpc-server = "==0.0.2" pytest-asyncio = "*" aenum = "*" pytest-cov = "*" +sphinx = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index ed93767c09c..e77ee5639da 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "5c5de74c0a4afd69763b8b6896cd965013bbbda3f78de7544a2df4e2a9bcae74" + "sha256": "95e712caa2eee3aa33a2a0cfbdbe2805543febfd2429fbded1fea743ecb65cc2" }, "pipfile-spec": 6, "requires": { @@ -25,6 +25,13 @@ "index": "pypi", "version": "==2.1.2" }, + "alabaster": { + "hashes": [ + "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", + "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" + ], + "version": "==0.7.12" + }, "antlr4-python3-runtime": { "hashes": [ "sha256:168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b" @@ -54,6 +61,13 @@ ], "version": "==18.2.0" }, + "babel": { + "hashes": [ + "sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669", + "sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23" + ], + "version": "==2.6.0" + }, "backcall": { "hashes": [ 
"sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4", @@ -125,6 +139,14 @@ ], "version": "==4.3.2" }, + "docutils": { + "hashes": [ + "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", + "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", + "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6" + ], + "version": "==0.14" + }, "et-xmlfile": { "hashes": [ "sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b" @@ -159,6 +181,13 @@ ], "version": "==2.8" }, + "imagesize": { + "hashes": [ + "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", + "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5" + ], + "version": "==1.1.0" + }, "ipykernel": { "hashes": [ "sha256:0aeb7ec277ac42cc2b59ae3d08b10909b2ec161dc6908096210527162b53675d", @@ -195,6 +224,13 @@ ], "version": "==0.13.3" }, + "jinja2": { + "hashes": [ + "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", + "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + ], + "version": "==2.10" + }, "jupyter-client": { "hashes": [ "sha256:b5f9cb06105c1d2d30719db5ffb3ea67da60919fb68deaefa583deccd8813551", @@ -274,24 +310,57 @@ "index": "pypi", "version": "==4.3.2" }, + "markupsafe": { + "hashes": [ + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + 
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" + ], + "version": "==1.1.1" + }, "matplotlib": { "hashes": [ - "sha256:16aa61846efddf91df623bbb4598e63be1068a6b6a2e6361cc802b41c7a286eb", - "sha256:1975b71a33ac986bb39b6d5cfbc15c7b1f218f1134efb4eb3881839d6ae69984", - "sha256:2b222744bd54781e6cc0b717fa35a54e5f176ba2ced337f27c5b435b334ef854", - "sha256:317643c0e88fad55414347216362b2e229c130edd5655fea5f8159a803098468", - "sha256:4269ce3d1b897d46fc3cc2273a0cc2a730345bb47e4456af662e6fca85c89dd7", - 
"sha256:65214fd668975077cdf8d408ccf2b2d6bdf73b4e6895a79f8e99ce4f0b43fcdb", - "sha256:74bc213ab8a92d86a0b304d9359d1e1d14168d4c6121b83862c9d8a88b89a738", - "sha256:88949be0db54755995dfb0210d0099a8712a3c696c860441971354c3debfc4af", - "sha256:8e1223d868be89423ec95ada5f37aa408ee64fe76ccb8e4d5f533699ba4c0e4a", - "sha256:9fa00f2d7a552a95fa6016e498fdeb6d74df537853dda79a9055c53dfc8b6e1a", - "sha256:c27fd46cab905097ba4bc28d5ba5289930f313fb1970c9d41092c9975b80e9b4", - "sha256:c94b792af431f6adb6859eb218137acd9a35f4f7442cea57e4a59c54751c36af", - "sha256:f4c12a01eb2dc16693887a874ba948b18c92f425c4d329639ece6d3bb8e631bb" + "sha256:1ae6549976b6ceb6ee426272a28c0fc9715b3e3669694d560c8f661c5b39e2c5", + "sha256:4d4250bf508dd07cca3b43888097f873cadb66eec6ac63dbbfb798798ec07af2", + "sha256:53af2e01d7f1700ed2b64a9091bc865360c9c4032f625451c4589a826854c787", + "sha256:63e498067d32d627111cd1162cae1621f1221f9d4c6a9745dd7233f29de581b6", + "sha256:7169a34971e398dd58e87e173f97366fd88a3fa80852704530433eb224a8ca57", + "sha256:91c54d6bb9eeaaff965656c5ea6cbdcbf780bad8462ac99b30b451548194746f", + "sha256:aeef177647bb3fccfe09065481989d7dfc5ac59e9367d6a00a3481062cf651e4", + "sha256:cf8ae10559a78aee0409ede1e9d4fda03895433eeafe609dd9ed67e45f552db0", + "sha256:d51d0889d1c4d51c51a9822265c0494ea3e70a52bdd88358e0863daca46fa23a", + "sha256:de5ccd3500247f85fe4f9fad90f80a8bd397e4f110a4c33fabf95f07403e8372", + "sha256:e1d33589e32f482d0a7d1957bf473d43341115d40d33f578dad44432e47df7b7", + "sha256:e8d1939262aa6b36d0c51f50a50a43a04b9618d20db31e6c0192b1463067aeef", + "sha256:e918d51b1fda82a65fdf52d2f3914b2246481cc2a9cd10e223e6be6078916ff3" ], "index": "pypi", - "version": "==3.0.2" + "version": "==3.0.3" }, "more-itertools": { "hashes": [ @@ -351,6 +420,13 @@ "index": "pypi", "version": "==2.6.0" }, + "packaging": { + "hashes": [ + "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", + "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" + ], + "version": "==19.0" + }, 
"parso": { "hashes": [ "sha256:4580328ae3f548b358f4901e38c0578229186835f0fa0846e47369796dd5bcc9", @@ -462,6 +538,13 @@ "index": "pypi", "version": "==0.0.2" }, + "pytz": { + "hashes": [ + "sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9", + "sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c" + ], + "version": "==2018.9" + }, "pyzmq": { "hashes": [ "sha256:07a03450418694fb07e76a0191b6bc9f411afc8e364ca2062edcf28bb0e51c63", @@ -595,17 +678,39 @@ ], "version": "==2.0.5" }, + "snowballstemmer": { + "hashes": [ + "sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128", + "sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89" + ], + "version": "==1.2.1" + }, + "sphinx": { + "hashes": [ + "sha256:b53904fa7cb4b06a39409a492b949193a1b68cc7241a1a8ce9974f86f0d24287", + "sha256:c1c00fc4f6e8b101a0d037065043460dffc2d507257f2f11acaed71fd2b0c83c" + ], + "index": "pypi", + "version": "==1.8.4" + }, + "sphinxcontrib-websupport": { + "hashes": [ + "sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd", + "sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9" + ], + "version": "==1.1.0" + }, "tornado": { "hashes": [ - "sha256:0662d28b1ca9f67108c7e3b77afabfb9c7e87bde174fbda78186ecedc2499a9d", - "sha256:4e5158d97583502a7e2739951553cbd88a72076f152b4b11b64b9a10c4c49409", - "sha256:732e836008c708de2e89a31cb2fa6c0e5a70cb60492bee6f1ea1047500feaf7f", - "sha256:8154ec22c450df4e06b35f131adc4f2f3a12ec85981a203301d310abf580500f", - "sha256:8e9d728c4579682e837c92fdd98036bd5cdefa1da2aaf6acf26947e6dd0c01c5", - "sha256:d4b3e5329f572f055b587efc57d29bd051589fb5a43ec8898c77a47ec2fa2bbb", - "sha256:e5f2585afccbff22390cddac29849df463b252b711aa2ce7c5f3f342a5b3b444" + "sha256:3f8db1394416371fb3a7a56062c77366cf10f8c4d81626df0135a2b2a7e26d2f", + "sha256:434a2821caa09cf96ffccbfaf101b8c27c4f9eee8f9e5ba933c6c4dc4c7eabbe", + 
"sha256:50e3fe9265938d36dd1744e395669555caf161ca5bf46c56d612866cbcda4869", + "sha256:5e9565c293e904c5642752b2ae0cefe932cd201e82a81cd6ee3480b0448d0250", + "sha256:981dfdac0308eca069447e4b4fbb029cc91627f58dd7a5f35a97d6245ab5824d", + "sha256:d675dd93cdad3545e619ce609451a7f77851bd5c3f34c58a1e01f5fd119f40f9", + "sha256:e90c5fa57a84b1cbd36c656a35a3dd07d130bcca8a2b5ee8cb0f16479bb6e4e3" ], - "version": "==5.1.1" + "version": "==6.0" }, "traitlets": { "hashes": [ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000000..298ea9e213e --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000000..e2996fad074 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + + +# -- Project information ----------------------------------------------------- + +project = 'Persper Code Analytics' +copyright = '2019, Persper Foundation' +author = 'Persper Foundation' + +# The short X.Y version +version = '' +# The full version, including alpha/beta/rc tags +release = '' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'PersperCodeAnalyticsdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'PersperCodeAnalytics.tex', 'Persper Code Analytics Documentation', + 'Persper Foundation', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + (master_doc, 'perspercodeanalytics', 'Persper Code Analytics Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'PersperCodeAnalytics', 'Persper Code Analytics Documentation', + author, 'PersperCodeAnalytics', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000000..1802d695d21 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,18 @@ +Welcome to Persper Code Analytics's documentation! +================================================== + +.. automodule:: persper.analytics.call_commit_graph + :members: + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000000..27f573b87af --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 4f14eaa65b3..05af148aec3 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -1,3 +1,8 @@ +""" +call_commit_graph.py +==================================== +CallCommitGraph stores all relevant analysis results +""" import networkx as nx from networkx.readwrite import json_graph from persper.analytics.devrank import devrank @@ -29,6 +34,10 @@ def fromHexsha(ordinal: int, hexsha: str, message: str): class CallCommitGraph: + """ + The key data structure that stores all functions' call relationships + and edit histories across commits. 
+ """ def __init__(self, node_link_data=None, commit_id_generator=CommitIdGenerators.fromHexsha): if node_link_data: @@ -39,6 +48,7 @@ def __init__(self, node_link_data=None, commit_id_generator=CommitIdGenerators.f self._current_commit_id = None def reset(self): + """Reset all internal states""" self._digraph = self._new_graph() def _new_graph(self): @@ -46,21 +56,21 @@ def _new_graph(self): with appropriate arguments""" return nx.DiGraph(commits={}) - # Read-only access def nodes(self, data=False): + """Provide read-only access for nodes""" return self._digraph.nodes(data=data) - # Read-only access def edges(self, data=False): + """Provide read-only access for edges""" return self._digraph.edges(data=data) - # Read-only access def commits(self): + """Provide read-only access for commits""" # https://networkx.github.io/documentation/stable/tutorial.html#graph-attributes return self._digraph.graph['commits'] - # Read-only access def __contains__(self, node): + """Implement membership check""" return node in self._digraph def add_commit(self, hexsha, author_name, author_email, message): From 73fae2ff24217ad28adbb199b9e9b46a32912192 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 15:18:33 -0800 Subject: [PATCH 66/84] Fix a renaming error in tests --- test/test_analytics/test_detect_change.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_analytics/test_detect_change.py b/test/test_analytics/test_detect_change.py index 3d50dbbb429..6409232d327 100644 --- a/test/test_analytics/test_detect_change.py +++ b/test/test_analytics/test_detect_change.py @@ -2,7 +2,7 @@ from persper.analytics.patch_parser import PatchParser from persper.analytics.detect_change import get_changed_functions from persper.analytics.call_graph.cpp import get_func_ranges_cpp -from persper.analytics.srcml import transform_src_to_tree +from persper.analytics.srcml import src_to_tree dir_path = os.path.dirname(os.path.abspath(__file__)) @@ -40,7 +40,7 @@ 
def test_detect_change(): assert(parsing_result == parsing_truth) with open(os.path.join(dir_path, 'example.cc'), 'r') as f: - root = transform_src_to_tree(f.read(), ext='.cc') + root = src_to_tree('example.cc', f.read()) func_ranges_result = get_func_ranges_cpp(root) assert(func_ranges_result == func_ranges_truth) From 5647bc5eb8ebc310826916bb540d1dfed1d211f8 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 22:20:24 -0800 Subject: [PATCH 67/84] Update test for black list after using ignore space option --- test/test_analytics/test_call_commit_graph.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/test_analytics/test_call_commit_graph.py b/test/test_analytics/test_call_commit_graph.py index e9e6b758396..c61654b03fb 100644 --- a/test/test_analytics/test_call_commit_graph.py +++ b/test/test_analytics/test_call_commit_graph.py @@ -102,6 +102,7 @@ async def test_black_set(): Its parent: https://github.com/bitcoin/bitcoin/commit/5b721607b1057df4dfe97f80d235ed372312f398 Its grandparent: https://github.com/bitcoin/bitcoin/commit/2ef9cfa5b81877b1023f2fcb82f5a638b1eb013c Its great grandparent: https://github.com/bitcoin/bitcoin/commit/7d7797b141dbd4ed9db1dda94684beb3395c2534 + Its great great grandparent: https://github.com/bitcoin/bitcoin/commit/401926283a200994ecd7df8eae8ced8e0b067c46 """ repo_path = os.path.join(root_path, 'repos/bitcoin') bitcoin_url = 'https://github.com/bitcoin/bitcoin' @@ -109,13 +110,14 @@ async def test_black_set(): Repo.clone_from(bitcoin_url, repo_path) az = Analyzer(repo_path, CPPGraphServer(CPP_FILENAME_REGEXES)) crlf_sha = '0a61b0df1224a5470bcddab302bc199ca5a9e356' - ggparent_sha = '7d7797b141dbd4ed9db1dda94684beb3395c2534' - rev = ggparent_sha + '..' + crlf_sha + parent_sha = '5b721607b1057df4dfe97f80d235ed372312f398' + gggparent_sha = '401926283a200994ecd7df8eae8ced8e0b067c46' + rev = gggparent_sha + '..' 
+ crlf_sha await az.analyze(rev=rev) ccgraph = az.get_graph() devdict = ccgraph.commit_devranks(0.85) - devdict2 = ccgraph.commit_devranks(0.85, black_set=set([crlf_sha])) + devdict2 = ccgraph.commit_devranks(0.85, black_set=set([parent_sha])) assert(len(devdict) == 3) assert(len(devdict2) == 2) - assert(crlf_sha in devdict) - assert(crlf_sha not in devdict2) + assert(parent_sha in devdict) + assert(parent_sha not in devdict2) From 405cb6d7d20ae7f3ca415938d7d79dcef8cafc9f Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 22:21:00 -0800 Subject: [PATCH 68/84] Add tests for git diff's ignore space option --- test/test_analytics/test_diff.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 test/test_analytics/test_diff.py diff --git a/test/test_analytics/test_diff.py b/test/test_analytics/test_diff.py new file mode 100644 index 00000000000..75446ce46f9 --- /dev/null +++ b/test/test_analytics/test_diff.py @@ -0,0 +1,22 @@ +import os +from git import Repo +from persper.analytics.git_tools import diff_with_first_parent +from persper.util.path import root_path + + +def test_diff_ignore_space(): + """ + bitcoin project has a commit which only converts CRLF to LF + its diff with parent should be empty when + ignore space option is enabled + The CRLF commit: https://github.com/bitcoin/bitcoin/commit/0a61b0df1224a5470bcddab302bc199ca5a9e356 + """ + repo_path = os.path.join(root_path, "repos/bitcoin") + bitcoin_url = 'https://github.com/bitcoin/bitcoin' + if not os.path.exists(repo_path): + Repo.clone_from(bitcoin_url, repo_path) + r = Repo(repo_path) + crlf_sha = '0a61b0df1224a5470bcddab302bc199ca5a9e356' + crlf_commit = r.commit(crlf_sha) + diff_result = diff_with_first_parent(r, crlf_commit) + assert(len(diff_result) == 0) From 4f77bf1de9349b10105306535208c20f12ccf9b9 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 4 Mar 2019 22:37:07 -0800 Subject: [PATCH 69/84] Extend c update_graph to support cpp --- 
persper/analytics/call_graph/c.py | 43 +++++++++++++++++-------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/persper/analytics/call_graph/c.py b/persper/analytics/call_graph/c.py index ca413e3deda..961f147d755 100644 --- a/persper/analytics/call_graph/c.py +++ b/persper/analytics/call_graph/c.py @@ -1,50 +1,55 @@ -import networkx as nx -from persper.analytics.call_graph.utils import remove_edges_of_node, ns, line_attr +from persper.analytics.call_graph.utils import ns, line_attr class NotFunctionCallError(Exception): """Raise for false positive nodes""" -def handle_function(func_node): +def _handle_function(func_node): """Given a node, return function name and function range (start & end lineno)""" - + # function name name_node = func_node.find('srcml:name', ns) - func_name, start_line = handle_name(name_node) + func_name, start_line = _handle_function_name(name_node) if not func_name or not start_line: - print('Function name/start not found!') # very unlikely to happen + print('ERROR: _handle_function fails to extract name or location.') return None, None, None + # function body block_node = func_node.find('srcml:block', ns) if block_node is None: try: block_node = func_node.xpath('./following-sibling::srcml:block', namespaces=ns)[0] except: - print("Block node not found (in func {})".format(func_name)) - return func_name, None, None + print("ERROR: %s has no block_node." % func_namae) + return func_name, start_line, None try: pos_node = block_node.find('pos:position', ns) end_line = int(pos_node.attrib[line_attr]) except: - print("Block node doesn't have position node inside!") - return func_name, None, None + print("ERROR: %s's block_node doesn't have position info." 
% func_name) + return func_name, start_line, None return func_name, start_line, end_line -def handle_name(name_node): +def _handle_function_name(name_node): """Given an node, return its text content and position (line)""" - text, line = None, None + name, line = None, None if name_node is not None: - text = name_node.text - line = int(name_node.attrib[line_attr]) - return text, line + if name_node.text: + name = name_node.text + line = int(name_node.attrib[line_attr]) + else: + line = int(name_node[0].attrib[line_attr]) + name = name_node[2].text + + return name, line -def handle_call(call_node): +def _handle_call(call_node): """Given an node, return function name being called Throws NotFunctionCallException @@ -71,7 +76,7 @@ def update_graph(ccgraph, ast_list, change_stats): for ast in ast_list: filename = ast.attrib['filename'] for function in ast.findall('./srcml:function', namespaces=ns): - caller_name, _, _ = handle_function(function) + caller_name, _, _ = _handle_function(function) if not caller_name: continue @@ -84,7 +89,7 @@ def update_graph(ccgraph, ast_list, change_stats): for call in function.xpath('.//srcml:call', namespaces=ns): try: - callee_name = handle_call(call) + callee_name = _handle_call(call) except NotFunctionCallError: continue except: @@ -108,7 +113,7 @@ def get_func_ranges_c(root): func_names, func_ranges = [], [] for func_node in root.findall('./srcml:function', namespaces=ns): - func_name, start_line, end_line = handle_function(func_node) + func_name, start_line, end_line = _handle_function(func_node) if not (func_name and start_line and end_line): continue From b567904d0a9e5455bc9d1df8152b2677b1b3b8d7 Mon Sep 17 00:00:00 2001 From: xinyan Date: Thu, 7 Mar 2019 23:55:40 +0800 Subject: [PATCH 70/84] Revert "Change files from set back to list" This reverts commit 6d0da87b3c451c2cce7b04f9eb055e9c63a9aa3c. 
--- persper/analytics/call_commit_graph.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 05af148aec3..6f13108dd9d 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -93,7 +93,7 @@ def _next_cindex(self): # TODO: remove the default value of files def add_node(self, node, files=[]): - self._digraph.add_node(node, size=None, history={}, files=files) + self._digraph.add_node(node, size=None, history={}, files=set(files)) # add_node must be called on source and target first def add_edge(self, source, target): @@ -120,7 +120,7 @@ def _get_node_history(self, node): return self._digraph.nodes[node]['history'] def update_node_files(self, node, new_files): - self._digraph.nodes[node]['files'] = new_files + self._digraph.nodes[node]['files'] = set(new_files) # TODO: provide other options for computing a node's size def _set_all_nodes_size(self, black_set=None): From fa0fd67cc62fd83a25adad71fe8ff82f97429399 Mon Sep 17 00:00:00 2001 From: xinyan Date: Thu, 7 Mar 2019 23:59:06 +0800 Subject: [PATCH 71/84] Correct test_feature_branch repo. Add forward decl to conform C99. --- test/test_feature_branch/C/main.c | 2 ++ test/test_feature_branch/D/main.c | 2 ++ test/test_feature_branch/E/main.c | 2 ++ test/test_feature_branch/F/main.c | 2 ++ test/test_feature_branch/G/main.c | 2 ++ test/test_feature_branch/H/main.c | 2 ++ test/test_feature_branch/I/main.c | 2 ++ test/test_feature_branch/K/main.c | 2 ++ 8 files changed, 16 insertions(+) diff --git a/test/test_feature_branch/C/main.c b/test/test_feature_branch/C/main.c index 401fb0597d6..5eaeabc97a7 100644 --- a/test/test_feature_branch/C/main.c +++ b/test/test_feature_branch/C/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl. 
+ /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; diff --git a/test/test_feature_branch/D/main.c b/test/test_feature_branch/D/main.c index 401fb0597d6..c5a80c50643 100644 --- a/test/test_feature_branch/D/main.c +++ b/test/test_feature_branch/D/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; diff --git a/test/test_feature_branch/E/main.c b/test/test_feature_branch/E/main.c index 401fb0597d6..c5a80c50643 100644 --- a/test/test_feature_branch/E/main.c +++ b/test/test_feature_branch/E/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; diff --git a/test/test_feature_branch/F/main.c b/test/test_feature_branch/F/main.c index 401fb0597d6..c5a80c50643 100644 --- a/test/test_feature_branch/F/main.c +++ b/test/test_feature_branch/F/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; diff --git a/test/test_feature_branch/G/main.c b/test/test_feature_branch/G/main.c index 19d8834f11f..67f1d720f3a 100644 --- a/test/test_feature_branch/G/main.c +++ b/test/test_feature_branch/G/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B*/ char* str_append_chr(char* string, char append) { char* newstring = NULL; diff --git a/test/test_feature_branch/H/main.c b/test/test_feature_branch/H/main.c index 401fb0597d6..c5a80c50643 100644 --- 
a/test/test_feature_branch/H/main.c +++ b/test/test_feature_branch/H/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; diff --git a/test/test_feature_branch/I/main.c b/test/test_feature_branch/I/main.c index 401fb0597d6..c5a80c50643 100644 --- a/test/test_feature_branch/I/main.c +++ b/test/test_feature_branch/I/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; diff --git a/test/test_feature_branch/K/main.c b/test/test_feature_branch/K/main.c index 401fb0597d6..c5a80c50643 100644 --- a/test/test_feature_branch/K/main.c +++ b/test/test_feature_branch/K/main.c @@ -8,6 +8,8 @@ int str_len(char *string) /* str_append is deleted in B */ +int str_equals(char *equal1, char *eqaul2); // Forward decl + /* added in B, edited in C */ char* str_append_chr(char* string, char append) { char* newstring = ""; From b1fd1479359b361e3a08651d0f3cf011a6f78b9c Mon Sep 17 00:00:00 2001 From: xinyan Date: Fri, 8 Mar 2019 00:00:56 +0800 Subject: [PATCH 72/84] Update cpp test baseline. ccls won't count in all the function decls for now. Make sure you have ccls PR 307 patched. 
--- .../baseline/cpp_test_repo/A.g.json | 105 +++--------------- .../baseline/cpp_test_repo/B.g.json | 96 +++------------- .../baseline/cpp_test_repo/C.g.json | 87 ++------------- .../baseline/cpp_test_repo/D.g.json | 87 ++------------- .../baseline/feature_branch/C.g.json | 9 +- .../baseline/feature_branch/D.g.json | 9 +- .../baseline/feature_branch/E.g.json | 9 +- .../baseline/feature_branch/F.g.json | 15 +-- .../baseline/feature_branch/G.g.json | 9 +- .../baseline/feature_branch/H.g.json | 9 +- .../baseline/feature_branch/I.g.json | 9 +- .../baseline/feature_branch/J.g.json | 9 +- .../baseline/feature_branch/K.g.json | 15 +-- .../feature_branch_first_parent/C.g.json | 9 +- .../feature_branch_first_parent/D.g.json | 15 +-- .../feature_branch_first_parent/E.g.json | 15 +-- .../feature_branch_first_parent/F.g.json | 15 +-- .../feature_branch_first_parent/K.g.json | 15 +-- 18 files changed, 85 insertions(+), 452 deletions(-) diff --git a/test/test_analytics/baseline/cpp_test_repo/A.g.json b/test/test_analytics/baseline/cpp_test_repo/A.g.json index 3a503bd6209..cd6aaa90876 100644 --- a/test/test_analytics/baseline/cpp_test_repo/A.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/A.g.json @@ -28,30 +28,10 @@ "addedBy": "A", "weight": null }, - "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { - "addedBy": "A", - "weight": null - }, - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "addedBy": "A", - "weight": null - }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { "addedBy": "A", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": "A", - "weight": null - }, - "const char *FriendlyNameOf(const std::type_index 
&type)|->|const char *FriendlyNameOf()": { - "addedBy": "A", - "weight": null - }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": "A", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "A", "weight": null @@ -59,10 +39,6 @@ "int main(int argc, char *argv[])|->|bool ConfigurationParser::GetBool(const std::string &key, bool defaultValue) const": { "addedBy": "A", "weight": null - }, - "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { - "addedBy": "A", - "weight": null } }, "nodes": { @@ -188,12 +164,11 @@ }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 25, + "adds": 24, "dels": 0 } }, @@ -201,12 +176,11 @@ }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -214,12 +188,11 @@ }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -227,12 +200,11 @@ }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -240,12 +212,11 @@ }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -253,12 +224,11 @@ }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], 
"history": { "A": { - "adds": 17, + "adds": 16, "dels": 0 } }, @@ -398,12 +368,11 @@ }, "bool Confirm(const std::string &prompt)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 18, + "adds": 17, "dels": 0 } }, @@ -411,12 +380,11 @@ }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 30, + "adds": 29, "dels": 0 } }, @@ -458,42 +426,6 @@ }, "size": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const std::type_index &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const type_info &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -604,12 +536,11 @@ }, "void ReportException(const std::exception &ex, int level)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 21, + "adds": 20, "dels": 0 } }, diff --git a/test/test_analytics/baseline/cpp_test_repo/B.g.json b/test/test_analytics/baseline/cpp_test_repo/B.g.json index aebb4ca25f3..cdcca5b1baa 100644 --- a/test/test_analytics/baseline/cpp_test_repo/B.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/B.g.json @@ -28,30 +28,10 @@ "addedBy": "A", "weight": null }, - "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { - "addedBy": "B", - "weight": null - }, - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const 
std::string &rhs, StringComparison comparision)": { - "addedBy": "B", - "weight": null - }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { "addedBy": "B", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": "A", - "weight": null - }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": "A", - "weight": null - }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": "A", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "B", "weight": null @@ -71,10 +51,6 @@ "int main(int argc, char *argv[])|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "B", "weight": null - }, - "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { - "addedBy": "B", - "weight": null } }, "nodes": { @@ -200,12 +176,11 @@ }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 25, + "adds": 24, "dels": 0 } }, @@ -213,12 +188,11 @@ }, "RowReader &operator>>(RowReader &reader, double &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -226,12 +200,11 @@ }, "RowReader &operator>>(RowReader &reader, float &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -239,12 +212,11 @@ }, "RowReader &operator>>(RowReader &reader, int &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - 
"adds": 7, + "adds": 6, "dels": 0 } }, @@ -252,12 +224,11 @@ }, "RowReader &operator>>(RowReader &reader, long &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -265,12 +236,11 @@ }, "RowReader &operator>>(RowReader &reader, std::string &rhs)": { "files": [ - "TextFileParsers.cpp", - "TextFileParsers.h" + "TextFileParsers.cpp" ], "history": { "A": { - "adds": 17, + "adds": 16, "dels": 0 } }, @@ -414,7 +384,7 @@ ], "history": { "A": { - "adds": 18, + "adds": 17, "dels": 0 } }, @@ -426,7 +396,7 @@ ], "history": { "A": { - "adds": 30, + "adds": 29, "dels": 0 } }, @@ -468,42 +438,6 @@ }, "size": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const std::type_index &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const type_info &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -622,7 +556,7 @@ ], "history": { "A": { - "adds": 21, + "adds": 20, "dels": 0 } }, diff --git a/test/test_analytics/baseline/cpp_test_repo/C.g.json b/test/test_analytics/baseline/cpp_test_repo/C.g.json index 2e596bc9ef6..babf7d98549 100644 --- a/test/test_analytics/baseline/cpp_test_repo/C.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/C.g.json @@ -28,30 +28,10 @@ "addedBy": "C", "weight": null }, - "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { - "addedBy": "C", - "weight": null - }, - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison 
comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "addedBy": "C", - "weight": null - }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { "addedBy": "C", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - "addedBy": "C", - "weight": null - }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": "C", - "weight": null - }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": "C", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "C", "weight": null @@ -71,10 +51,6 @@ "int main(int argc, char *argv[])|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "B", "weight": null - }, - "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { - "addedBy": "C", - "weight": null } }, "nodes": { @@ -204,7 +180,7 @@ ], "history": { "A": { - "adds": 25, + "adds": 24, "dels": 0 }, "C": { @@ -220,7 +196,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -232,7 +208,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -244,7 +220,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -256,7 +232,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -268,7 +244,7 @@ ], "history": { "A": { - "adds": 17, + "adds": 16, "dels": 0 } }, @@ -412,12 +388,11 @@ }, "bool Confirm(const std::string &prompt)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 18, + "adds": 17, "dels": 0 } }, @@ -425,12 +400,11 @@ }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { 
"files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 30, + "adds": 29, "dels": 0 }, "C": { @@ -476,42 +450,6 @@ }, "size": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const std::type_index &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const type_info &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -634,12 +572,11 @@ }, "void ReportException(const std::exception &ex, int level)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 21, + "adds": 20, "dels": 0 } }, diff --git a/test/test_analytics/baseline/cpp_test_repo/D.g.json b/test/test_analytics/baseline/cpp_test_repo/D.g.json index 02fe0fde492..aa0ec8956b9 100644 --- a/test/test_analytics/baseline/cpp_test_repo/D.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/D.g.json @@ -28,30 +28,10 @@ "addedBy": "D", "weight": null }, - "bool Confirm(const std::string &prompt)|->|bool Confirm(const std::string &prompt)": { - "addedBy": "D", - "weight": null - }, - "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { - "addedBy": "D", - "weight": null - }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)|->|enum class StringComparison : int {}": { "addedBy": "D", "weight": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)|->|const char *FriendlyNameOf()": { - 
"addedBy": "D", - "weight": null - }, - "const char *FriendlyNameOf(const std::type_index &type)|->|const char *FriendlyNameOf()": { - "addedBy": "D", - "weight": null - }, - "const char *FriendlyNameOf(const type_info &type)|->|const char *FriendlyNameOf()": { - "addedBy": "D", - "weight": null - }, "int main(int argc, char *argv[])|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "C", "weight": null @@ -71,10 +51,6 @@ "int main(int argc, char *argv[])|->|void ReportException(const std::exception &ex, int level)": { "addedBy": "B", "weight": null - }, - "void ReportException(const std::exception &ex, int level)|->|void ReportException(const std::exception &ex, int level)": { - "addedBy": "D", - "weight": null } }, "nodes": { @@ -204,7 +180,7 @@ ], "history": { "A": { - "adds": 25, + "adds": 24, "dels": 0 }, "C": { @@ -224,7 +200,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -236,7 +212,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -248,7 +224,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -260,7 +236,7 @@ ], "history": { "A": { - "adds": 7, + "adds": 6, "dels": 0 } }, @@ -272,7 +248,7 @@ ], "history": { "A": { - "adds": 17, + "adds": 16, "dels": 0 } }, @@ -420,12 +396,11 @@ }, "bool Confirm(const std::string &prompt)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 18, + "adds": 17, "dels": 0 } }, @@ -433,12 +408,11 @@ }, "bool Equal(const std::string &lhs, const std::string &rhs, StringComparison comparision)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 30, + "adds": 29, "dels": 0 }, "C": { @@ -488,42 +462,6 @@ }, "size": null }, - "const char *FriendlyNameOf(const ReliabilityNetworkEntry &instance)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const std::type_index &type)": { - 
"files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, - "const char *FriendlyNameOf(const type_info &type)": { - "files": [ - "Utility.h" - ], - "history": { - "A": { - "adds": 1, - "dels": 0 - } - }, - "size": null - }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -654,12 +592,11 @@ }, "void ReportException(const std::exception &ex, int level)": { "files": [ - "Utility.cpp", - "Utility.h" + "Utility.cpp" ], "history": { "A": { - "adds": 21, + "adds": 20, "dels": 0 } }, diff --git a/test/test_analytics/baseline/feature_branch/C.g.json b/test/test_analytics/baseline/feature_branch/C.g.json index 4b791785f4e..9992cb0fd09 100644 --- a/test/test_analytics/baseline/feature_branch/C.g.json +++ b/test/test_analytics/baseline/feature_branch/C.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "C", "weight": null }, @@ -42,13 +42,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/D.g.json b/test/test_analytics/baseline/feature_branch/D.g.json index 4b931b0a2c0..488d426a937 100644 --- a/test/test_analytics/baseline/feature_branch/D.g.json +++ b/test/test_analytics/baseline/feature_branch/D.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -66,13 +66,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, 
char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/E.g.json b/test/test_analytics/baseline/feature_branch/E.g.json index 414ed9fd459..26ef09a68c0 100644 --- a/test/test_analytics/baseline/feature_branch/E.g.json +++ b/test/test_analytics/baseline/feature_branch/E.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -74,13 +74,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/F.g.json b/test/test_analytics/baseline/feature_branch/F.g.json index 31a14cda404..4c8ee3397cc 100644 --- a/test/test_analytics/baseline/feature_branch/F.g.json +++ b/test/test_analytics/baseline/feature_branch/F.g.json @@ -4,7 +4,7 @@ "addedBy": "F", "weight": null }, - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "F", "weight": null }, @@ -16,12 +16,8 @@ "addedBy": "F", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { - "addedBy": "F", - "weight": null - }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { - "addedBy": "G", + "addedBy": "F", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { @@ -94,13 +90,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/G.g.json 
b/test/test_analytics/baseline/feature_branch/G.g.json index 9be01d4cc7f..e933b06f594 100644 --- a/test/test_analytics/baseline/feature_branch/G.g.json +++ b/test/test_analytics/baseline/feature_branch/G.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "C", "weight": null }, @@ -66,13 +66,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/H.g.json b/test/test_analytics/baseline/feature_branch/H.g.json index 8089ef9ccbf..6d72823868b 100644 --- a/test/test_analytics/baseline/feature_branch/H.g.json +++ b/test/test_analytics/baseline/feature_branch/H.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -74,13 +74,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/I.g.json b/test/test_analytics/baseline/feature_branch/I.g.json index 414ed9fd459..26ef09a68c0 100644 --- a/test/test_analytics/baseline/feature_branch/I.g.json +++ b/test/test_analytics/baseline/feature_branch/I.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -74,13 +74,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - 
}, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/J.g.json b/test/test_analytics/baseline/feature_branch/J.g.json index 7606ab45a52..ae29e94aeef 100644 --- a/test/test_analytics/baseline/feature_branch/J.g.json +++ b/test/test_analytics/baseline/feature_branch/J.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -86,13 +86,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch/K.g.json b/test/test_analytics/baseline/feature_branch/K.g.json index 555bdff3d1e..c406a16e2d7 100644 --- a/test/test_analytics/baseline/feature_branch/K.g.json +++ b/test/test_analytics/baseline/feature_branch/K.g.json @@ -4,7 +4,7 @@ "addedBy": "F", "weight": null }, - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "F", "weight": null }, @@ -16,12 +16,8 @@ "addedBy": "F", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { - "addedBy": "F", - "weight": null - }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { - "addedBy": "G", + "addedBy": "F", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|int str_len(char *string)": { @@ -94,13 +90,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git 
a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json index 4b791785f4e..9992cb0fd09 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/C.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/C.g.json @@ -1,6 +1,6 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "C", "weight": null }, @@ -42,13 +42,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json index f289e724860..a1579f2d346 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/D.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/D.g.json @@ -1,18 +1,18 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": "C", + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "D", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": "C", + "addedBy": "D", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { "addedBy": "D", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -66,13 +66,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, 
"int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json index 8d6ec786302..3c24ccfc482 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/E.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/E.g.json @@ -1,18 +1,18 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": "C", + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "D", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": "C", + "addedBy": "D", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { "addedBy": "D", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -66,13 +66,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json index da60e2d5a1b..1ea3182ab6d 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/F.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/F.g.json @@ -1,18 +1,18 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": "C", + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "D", "weight": null }, "char *str_append_chr(char 
*string, char append)|->|int str_len(char *string)": { - "addedBy": "C", + "addedBy": "D", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { "addedBy": "D", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -78,13 +78,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ "main.c" diff --git a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json index 1c8c8e2e3a9..32ca12c74da 100644 --- a/test/test_analytics/baseline/feature_branch_first_parent/K.g.json +++ b/test/test_analytics/baseline/feature_branch_first_parent/K.g.json @@ -1,18 +1,18 @@ { "edges": { - "char *str_append_chr(char *string, char append)|->|int str_equals()": { - "addedBy": "C", + "char *str_append_chr(char *string, char append)|->|int str_equals(char *equal1, char *eqaul2)": { + "addedBy": "D", "weight": null }, "char *str_append_chr(char *string, char append)|->|int str_len(char *string)": { - "addedBy": "C", + "addedBy": "D", "weight": null }, "char *str_replace(char *search, char *replace, char *subject)|->|char *str_append_chr(char *string, char append)": { "addedBy": "D", "weight": null }, - "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals()": { + "char *str_replace(char *search, char *replace, char *subject)|->|int str_equals(char *equal1, char *eqaul2)": { "addedBy": "D", "weight": null }, @@ -78,13 +78,6 @@ }, "size": null }, - "int str_equals()": { - "files": [ - "main.c" - ], - "history": {}, - "size": null - }, "int str_equals(char *equal1, char *eqaul2)": { "files": [ 
"main.c" From 7ac2a760d5ef6da7af30090f1896e919e711a5dc Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 10 Mar 2019 18:31:26 +0800 Subject: [PATCH 73/84] Bug fix: TokenizedDocument should check the children of DocumentSymbol. Minor cleanup. --- .../analytics/lsp_graph_server/callgraph/builder.py | 11 ++++++++--- .../analytics/lsp_graph_server/callgraph/manager.py | 3 ++- .../lsp_graph_server/languageclient/lspcontract.py | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/persper/analytics/lsp_graph_server/callgraph/builder.py b/persper/analytics/lsp_graph_server/callgraph/builder.py index b54bd46bec6..37923c2aeb3 100644 --- a/persper/analytics/lsp_graph_server/callgraph/builder.py +++ b/persper/analytics/lsp_graph_server/callgraph/builder.py @@ -58,7 +58,7 @@ def PopulateSymbols(symbols): if filterResult == None: continue if filterResult == False: - if isinstance(s, SymbolInformation): + if isinstance(s, DocumentSymbol): PopulateSymbols(s.children) continue if isinstance(s, DocumentSymbol): @@ -66,12 +66,12 @@ def PopulateSymbols(symbols): symbolKinds[s.selectionRange.start.toTuple()] = s.kind self._scopes.append(CallGraphScope(s.detail or s.name, s.kind, fileName, s.range.start, s.range.end)) + if s.children: + PopulateSymbols(s.children) elif isinstance(s, SymbolInformation): symbolKinds[(s.location.range.start.line, s.name)] = (s.location.range.start.character, s.kind) self._scopes.append(CallGraphScope(s.containerName, s.kind, fileName, s.location.range.start, s.location.range.end)) - if s.children: - PopulateSymbols(s.children) else: _logger.error("Invalid DocumentSymbol in %s: %s", fileName, s) @@ -293,6 +293,11 @@ def filterSymbol(self, symbol: Union[DocumentSymbol, SymbolInformation]) -> bool False symbol should be excluded, while its children will pass filterSymbol None symbol and its children will be excluded """ + if symbol.kind in { + SymbolKind.Parameter, + SymbolKind.TypeParameter + }: + return None return symbol.kind in { 
SymbolKind.Constructor, SymbolKind.Enum, diff --git a/persper/analytics/lsp_graph_server/callgraph/manager.py b/persper/analytics/lsp_graph_server/callgraph/manager.py index b4b51a0296a..85b6cd6d00b 100644 --- a/persper/analytics/lsp_graph_server/callgraph/manager.py +++ b/persper/analytics/lsp_graph_server/callgraph/manager.py @@ -42,6 +42,7 @@ async def buildGraph(self, fileNames: Union[str, Iterable[str]] = None, globPatt branchCounter = 0 # with dups fileCounter = 0 await self._builder.waitForFileSystem() + def pushBranch(branch): nonlocal branchCounter try: @@ -64,7 +65,7 @@ def pushBranch(branch): pushBranch(b) if globPattern or not fileNames: async for b in self._builder.buildCallGraphInFiles(globPattern): - pushBranch(b) + pushBranch(b) if fileNames and not globPattern: _logger.info("Added %d branches from %d files.", branchCounter, fileCounter) else: diff --git a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py index 930e3741e03..390f0f0da42 100644 --- a/persper/analytics/lsp_graph_server/languageclient/lspcontract.py +++ b/persper/analytics/lsp_graph_server/languageclient/lspcontract.py @@ -237,7 +237,7 @@ def getSymbolRange(self): return self.selectionRange def __str__(self): - return self.name + "[" + self.kind + "]" + return str.format("{0}({1}){2}", self.name, self.kind, self.children or "") def toDict(self): raise NotImplementedError() From 2098cb5291e78ed1da3b5892e206912292ec2ebf Mon Sep 17 00:00:00 2001 From: xinyan Date: Sun, 10 Mar 2019 18:51:11 +0800 Subject: [PATCH 74/84] Update baseline for test_analyzer_lsp_ccls. Refactor test_analyzer_lsp_ccls. 
--- .../baseline/cpp_test_repo/A.g.json | 292 ++++++++++++++++++ .../baseline/cpp_test_repo/B.g.json | 292 ++++++++++++++++++ .../baseline/cpp_test_repo/C.g.json | 292 ++++++++++++++++++ .../baseline/cpp_test_repo/D.g.json | 292 ++++++++++++++++++ test/test_analytics/test_analyzer_lsp_ccls.py | 27 +- test/test_analytics/utility/graph_baseline.py | 18 +- 6 files changed, 1191 insertions(+), 22 deletions(-) diff --git a/test/test_analytics/baseline/cpp_test_repo/A.g.json b/test/test_analytics/baseline/cpp_test_repo/A.g.json index cd6aaa90876..2594c7d3aa5 100644 --- a/test/test_analytics/baseline/cpp_test_repo/A.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/A.g.json @@ -1,5 +1,9 @@ { "edges": { + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)|->|static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "addedBy": "A", + "weight": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "A", "weight": null @@ -138,6 +142,66 @@ }, "size": null }, + "ArgumentException::ArgumentException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler &BlockExitHandler::operator=(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::BlockExitHandler(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::~BlockExitHandler() 
noexcept": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 11, + "dels": 0 + } + }, + "size": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { "files": [ "TextFileParsers.cpp" @@ -162,6 +226,42 @@ }, "size": null }, + "Exception::Exception()": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "Exception::~Exception() noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "InvalidCastException::InvalidCastException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "files": [ "TextFileParsers.cpp" @@ -234,6 +334,18 @@ }, "size": null }, + "RowReader::operator bool() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, "TDest safe_cast(TSrc obj)": { "files": [ "Utility.h" @@ -270,6 +382,30 @@ }, "size": null }, + "TIterator _RangeToEnumerable::begin()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "TIterator _RangeToEnumerable::end()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "files": [ "TextFileParsers.h" @@ -330,6 +466,18 @@ }, "size": null }, + "_RangeToEnumerable::_RangeToEnumerable(const std::pair range)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "files": [ "Utility.h" @@ -390,6 +538,30 @@ }, "size": null }, + "bool RowReader::operator!() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + 
"adds": 4, + "dels": 0 + } + }, + "size": null + }, + "bool _RangeToEnumerable::empty()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "bool dynamic_kind_of(const TSrc *obj)": { "files": [ "Utility.h" @@ -414,6 +586,18 @@ }, "size": null }, + "const char *Exception::what() const noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "const char *FriendlyNameOf()": { "files": [ "Utility.h" @@ -426,6 +610,30 @@ }, "size": null }, + "const std::string &ArgumentException::ArgumentName() const": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "const std::stringstream &RowReader::LineStream() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -450,6 +658,54 @@ }, "size": null }, + "explicit BlockExitHandler::BlockExitHandler(const std::function &handler)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit Exception::Exception(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 6, + "dels": 0 + } + }, + "size": null + }, + "explicit OperationFailureException::OperationFailureException(int errorCode)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit RowReader::RowReader(bool keepWhitespace, char delim)": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 3, + "dels": 0 + } + }, + "size": null + }, "inline std::string to_string(const std::pair &value)": { "files": [ "Utility.h" @@ -486,6 +742,30 @@ }, "size": null }, + "size_t 
RowReader::LineNumber() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 5, + "dels": 0 + } + }, + "size": null + }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "files": [ "Utility.h" @@ -546,6 +826,18 @@ }, "size": null }, + "void RowReader::ResetLineNumber()": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "void ValidateStream(const TStream &stream)": { "files": [ "Utility.h" diff --git a/test/test_analytics/baseline/cpp_test_repo/B.g.json b/test/test_analytics/baseline/cpp_test_repo/B.g.json index cdcca5b1baa..d0db7b42971 100644 --- a/test/test_analytics/baseline/cpp_test_repo/B.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/B.g.json @@ -1,5 +1,9 @@ { "edges": { + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)|->|static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "addedBy": "A", + "weight": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "A", "weight": null @@ -150,6 +154,66 @@ }, "size": null }, + "ArgumentException::ArgumentException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler &BlockExitHandler::operator=(const BlockExitHandler &) = delete": { + 
"files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::BlockExitHandler(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::~BlockExitHandler() noexcept": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 11, + "dels": 0 + } + }, + "size": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { "files": [ "TextFileParsers.cpp" @@ -174,6 +238,42 @@ }, "size": null }, + "Exception::Exception()": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "Exception::~Exception() noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "InvalidCastException::InvalidCastException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "files": [ "TextFileParsers.cpp" @@ -246,6 +346,18 @@ }, "size": null }, + "RowReader::operator bool() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, "TDest safe_cast(TSrc obj)": { "files": [ "Utility.h" @@ -282,6 +394,30 @@ }, "size": null }, + "TIterator _RangeToEnumerable::begin()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "TIterator _RangeToEnumerable::end()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "files": [ "TextFileParsers.h" @@ -342,6 +478,18 @@ }, "size": null }, + "_RangeToEnumerable::_RangeToEnumerable(const std::pair range)": { + "files": [ + 
"Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "files": [ "Utility.h" @@ -402,6 +550,30 @@ }, "size": null }, + "bool RowReader::operator!() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "bool _RangeToEnumerable::empty()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "bool dynamic_kind_of(const TSrc *obj)": { "files": [ "Utility.h" @@ -426,6 +598,18 @@ }, "size": null }, + "const char *Exception::what() const noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "const char *FriendlyNameOf()": { "files": [ "Utility.h" @@ -438,6 +622,30 @@ }, "size": null }, + "const std::string &ArgumentException::ArgumentName() const": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "const std::stringstream &RowReader::LineStream() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -462,6 +670,54 @@ }, "size": null }, + "explicit BlockExitHandler::BlockExitHandler(const std::function &handler)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit Exception::Exception(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 6, + "dels": 0 + } + }, + "size": null + }, + "explicit OperationFailureException::OperationFailureException(int errorCode)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + 
"explicit RowReader::RowReader(bool keepWhitespace, char delim)": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 3, + "dels": 0 + } + }, + "size": null + }, "inline std::string to_string(const std::pair &value)": { "files": [ "Utility.h" @@ -502,6 +758,30 @@ }, "size": null }, + "size_t RowReader::LineNumber() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 5, + "dels": 0 + } + }, + "size": null + }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "files": [ "Utility.h" @@ -562,6 +842,18 @@ }, "size": null }, + "void RowReader::ResetLineNumber()": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "void ValidateStream(const TStream &stream)": { "files": [ "Utility.h" diff --git a/test/test_analytics/baseline/cpp_test_repo/C.g.json b/test/test_analytics/baseline/cpp_test_repo/C.g.json index babf7d98549..e8db0ec0531 100644 --- a/test/test_analytics/baseline/cpp_test_repo/C.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/C.g.json @@ -1,5 +1,9 @@ { "edges": { + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)|->|static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "addedBy": "A", + "weight": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "C", "weight": null @@ -150,6 +154,66 @@ }, "size": null }, + "ArgumentException::ArgumentException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + 
}, + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler &BlockExitHandler::operator=(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::BlockExitHandler(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::~BlockExitHandler() noexcept": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 11, + "dels": 0 + } + }, + "size": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { "files": [ "TextFileParsers.cpp" @@ -174,6 +238,42 @@ }, "size": null }, + "Exception::Exception()": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "Exception::~Exception() noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "InvalidCastException::InvalidCastException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "files": [ "TextFileParsers.cpp" @@ -250,6 +350,18 @@ }, "size": null }, + "RowReader::operator bool() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, "TDest safe_cast(TSrc obj)": { "files": [ "Utility.h" @@ -286,6 +398,30 @@ }, "size": null }, + "TIterator _RangeToEnumerable::begin()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "TIterator _RangeToEnumerable::end()": { + "files": 
[ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "files": [ "TextFileParsers.h" @@ -346,6 +482,18 @@ }, "size": null }, + "_RangeToEnumerable::_RangeToEnumerable(const std::pair range)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "files": [ "Utility.h" @@ -414,6 +562,30 @@ }, "size": null }, + "bool RowReader::operator!() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "bool _RangeToEnumerable::empty()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "bool dynamic_kind_of(const TSrc *obj)": { "files": [ "Utility.h" @@ -438,6 +610,18 @@ }, "size": null }, + "const char *Exception::what() const noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "const char *FriendlyNameOf()": { "files": [ "Utility.h" @@ -450,6 +634,30 @@ }, "size": null }, + "const std::string &ArgumentException::ArgumentName() const": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "const std::stringstream &RowReader::LineStream() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ "TextFileParsers.cpp" @@ -478,6 +686,54 @@ }, "size": null }, + "explicit BlockExitHandler::BlockExitHandler(const std::function &handler)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit Exception::Exception(const std::string &message)": { + 
"files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 6, + "dels": 0 + } + }, + "size": null + }, + "explicit OperationFailureException::OperationFailureException(int errorCode)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit RowReader::RowReader(bool keepWhitespace, char delim)": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 3, + "dels": 0 + } + }, + "size": null + }, "inline std::string to_string(const std::pair &value)": { "files": [ "Utility.h" @@ -522,6 +778,30 @@ }, "size": null }, + "size_t RowReader::LineNumber() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 5, + "dels": 0 + } + }, + "size": null + }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "files": [ "Utility.h" @@ -582,6 +862,18 @@ }, "size": null }, + "void RowReader::ResetLineNumber()": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "void ValidateStream(const TStream &stream)": { "files": [ "Utility.h" diff --git a/test/test_analytics/baseline/cpp_test_repo/D.g.json b/test/test_analytics/baseline/cpp_test_repo/D.g.json index aa0ec8956b9..e5eb44a4f6d 100644 --- a/test/test_analytics/baseline/cpp_test_repo/D.g.json +++ b/test/test_analytics/baseline/cpp_test_repo/D.g.json @@ -1,5 +1,9 @@ { "edges": { + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)|->|static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "addedBy": "A", + "weight": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path 
filePath)|->|TStream OpenAndValidate(const TPath arg1)": { "addedBy": "D", "weight": null @@ -150,6 +154,66 @@ }, "size": null }, + "ArgumentException::ArgumentException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "ArgumentException::ArgumentException(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 2, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler &BlockExitHandler::operator=(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::BlockExitHandler(const BlockExitHandler &) = delete": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "BlockExitHandler::~BlockExitHandler() noexcept": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 11, + "dels": 0 + } + }, + "size": null + }, "ConfigurationParser::ConfigurationParser(std::filesystem::path filePath)": { "files": [ "TextFileParsers.cpp" @@ -174,6 +238,42 @@ }, "size": null }, + "Exception::Exception()": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "Exception::~Exception() noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "InvalidCastException::InvalidCastException(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "RowReader &operator>>(RowReader &reader, bool &rhs)": { "files": [ "TextFileParsers.cpp" @@ -254,6 +354,18 @@ }, "size": null }, + "RowReader::operator bool() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": 
null + }, "TDest safe_cast(TSrc obj)": { "files": [ "Utility.h" @@ -290,6 +402,30 @@ }, "size": null }, + "TIterator _RangeToEnumerable::begin()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "TIterator _RangeToEnumerable::end()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "TStream &operator>>(TStream &s, RowReader &reader)": { "files": [ "TextFileParsers.h" @@ -350,6 +486,18 @@ }, "size": null }, + "_RangeToEnumerable::_RangeToEnumerable(const std::pair range)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "_RangeToEnumerable RangeToEnumerable(const std::pair range)": { "files": [ "Utility.h" @@ -426,6 +574,30 @@ }, "size": null }, + "bool RowReader::operator!() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "bool _RangeToEnumerable::empty()": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "bool dynamic_kind_of(const TSrc *obj)": { "files": [ "Utility.h" @@ -450,6 +622,18 @@ }, "size": null }, + "const char *Exception::what() const noexcept": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "const char *FriendlyNameOf()": { "files": [ "Utility.h" @@ -462,6 +646,30 @@ }, "size": null }, + "const std::string &ArgumentException::ArgumentName() const": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "const std::stringstream &RowReader::LineStream() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "double ConfigurationParser::GetDouble(const std::string &key, double defaultValue) const": { "files": [ 
"TextFileParsers.cpp" @@ -494,6 +702,54 @@ }, "size": null }, + "explicit BlockExitHandler::BlockExitHandler(const std::function &handler)": { + "files": [ + "Utility.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit Exception::Exception(const std::string &message)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 6, + "dels": 0 + } + }, + "size": null + }, + "explicit OperationFailureException::OperationFailureException(int errorCode)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 4, + "dels": 0 + } + }, + "size": null + }, + "explicit RowReader::RowReader(bool keepWhitespace, char delim)": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 3, + "dels": 0 + } + }, + "size": null + }, "inline std::string to_string(const std::pair &value)": { "files": [ "Utility.h" @@ -542,6 +798,30 @@ }, "size": null }, + "size_t RowReader::LineNumber() const": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, + "static std::string ArgumentException::BuildMessage(const std::string &message, const std::string &argumentName)": { + "files": [ + "Exceptions.h" + ], + "history": { + "A": { + "adds": 5, + "dels": 0 + } + }, + "size": null + }, "std::shared_ptr safe_pointer_cast(const std::shared_ptr &obj)": { "files": [ "Utility.h" @@ -602,6 +882,18 @@ }, "size": null }, + "void RowReader::ResetLineNumber()": { + "files": [ + "TextFileParsers.h" + ], + "history": { + "A": { + "adds": 1, + "dels": 0 + } + }, + "size": null + }, "void ValidateStream(const TStream &stream)": { "files": [ "Utility.h" diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index 0b903fb9e2e..420de70a369 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -4,7 +4,6 @@ import subprocess from pathlib import Path 
from tempfile import mkdtemp -from .utility.graph_baseline import GraphDumpAnalyzerObserver import networkx.readwrite.json_graph import pytest @@ -17,6 +16,11 @@ from persper.analytics.lsp_graph_server.ccls import CclsGraphServer from persper.util.path import root_path +from .utility.graph_baseline import GraphDumpAnalyzerObserver + +# Whether we are generating graph dump baseline, rather than testing for regression. +IS_GENERATING_BASELINE = True + _logger = logging.getLogger() testDataRoot = os.path.dirname(os.path.abspath(__file__)) @@ -49,6 +53,14 @@ def createCclsGraphServer(): return graphServer +def createGraphDumpAnalyzerObserver(testName: str): + return GraphDumpAnalyzerObserver( + None if IS_GENERATING_BASELINE else + os.path.join(testDataRoot, "baseline/" + testName), + os.path.join(testDataRoot, "actualdump/" + testName), + dumpNaming=CommitIdGenerators.fromComment) + + @pytest.mark.asyncio async def testFeatureBranchFirstParent(): """ @@ -58,9 +70,8 @@ async def testFeatureBranchFirstParent(): graphServer = createCclsGraphServer() analyzer = Analyzer(repoPath, graphServer, firstParentOnly=True) async with graphServer: - analyzer.observer = GraphDumpAnalyzerObserver( - os.path.join(testDataRoot, "baseline/feature_branch_first_parent"), - os.path.join(testDataRoot, "actualdump/feature_branch_first_parent")) + analyzer.observer = createGraphDumpAnalyzerObserver( + "feature_branch_first_parent") await analyzer.analyze() @@ -73,9 +84,7 @@ async def testFeatureBranch(): graphServer = createCclsGraphServer() analyzer = Analyzer(repoPath, graphServer, firstParentOnly=False) async with graphServer: - analyzer.observer = GraphDumpAnalyzerObserver( - os.path.join(testDataRoot, "baseline/feature_branch"), - os.path.join(testDataRoot, "actualdump/feature_branch")) + analyzer.observer = createGraphDumpAnalyzerObserver("feature_branch") await analyzer.analyze() @@ -85,7 +94,5 @@ async def testCppTestRepo(): graphServer = createCclsGraphServer() analyzer = 
Analyzer(repoPath, graphServer) async with graphServer: - analyzer.observer = GraphDumpAnalyzerObserver( - os.path.join(testDataRoot, "baseline/cpp_test_repo"), - os.path.join(testDataRoot, "actualdump/cpp_test_repo")) + analyzer.observer = createGraphDumpAnalyzerObserver("cpp_test_repo") await analyzer.analyze() diff --git a/test/test_analytics/utility/graph_baseline.py b/test/test_analytics/utility/graph_baseline.py index 8323f5455a5..96527e86e8b 100644 --- a/test/test_analytics/utility/graph_baseline.py +++ b/test/test_analytics/utility/graph_baseline.py @@ -11,7 +11,7 @@ from networkx import Graph from persper.analytics.analyzer2 import Analyzer, AnalyzerObserver -from persper.analytics.call_commit_graph import CallCommitGraph +from persper.analytics.call_commit_graph import CallCommitGraph, CommitIdGenerators from persper.analytics.graph_server import CommitSeekingMode _logger = logging.getLogger() @@ -66,12 +66,6 @@ def assertGraphMatches(baseline: dict, ccg: CallCommitGraph): assert not baselineEdgeIds, str.format( "Branch(es) missing: {0}.", baselineEdgeIds) - -class GraphDumpNamingRule(Enum): - CommitMessage = 0, - CommitHexSha = 1 - - class GraphDumpAnalyzerObserver(AnalyzerObserver): """ An implementation of AnalyzerObserver that generates graph dump after each commit, @@ -79,7 +73,7 @@ class GraphDumpAnalyzerObserver(AnalyzerObserver): """ def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = None, - dumpOnlyOnError: bool = None, dumpNaming: GraphDumpNamingRule = GraphDumpNamingRule.CommitHexSha): + dumpOnlyOnError: bool = None, dumpNaming = CommitIdGenerators.fromHexsha): """ Params: graphBaselineDumpPath: root folder of the baseline graph dump files. 
Set to values other than `None` @@ -105,11 +99,13 @@ def __init__(self, graphBaselineDumpPath: str = None, graphTestDumpPath: str = N else: self._dumpPath = None self._dumpOnlyOnError = graphBaselineDumpPath != None if dumpOnlyOnError == None else dumpOnlyOnError + self._dumpNaming = dumpNaming def onAfterCommit(self, analyzer: Analyzer, commit: Commit, seeking_mode: CommitSeekingMode): if seeking_mode == CommitSeekingMode.Rewind: return graph: CallCommitGraph = analyzer.graph + graphDumpLocalName = self._dumpNaming(-1, commit.hexsha, commit.message) + ".g.json" def dumpGraph(warnIfNotAvailable: bool): if not self._dumpPath: @@ -118,16 +114,14 @@ def dumpGraph(warnIfNotAvailable: bool): "Cannot dump call commit graph because no dump path has been specified. Commit %s: %s.", commit.hexsha, commit.message) return False data = graphToDict(graph) - graphPath = self._dumpPath.joinpath( - commit.message.strip() + ".g.json") + graphPath = self._dumpPath.joinpath(graphDumpLocalName) with open(graphPath, "wt") as f: json.dump(data, f, sort_keys=True, indent=4) return True # check baseline for regression if self._baselinePath: try: - graphPath = self._baselinePath.joinpath( - commit.message.strip() + ".g.json") + graphPath = self._baselinePath.joinpath(graphDumpLocalName) baselineData: dict = None with open(graphPath, "rt") as f: baselineData = fixGraphDict(json.load(f)) From f9f8387ef9f5137973f79d7df9805b206bb1f0ba Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sun, 10 Mar 2019 17:07:41 -0700 Subject: [PATCH 75/84] Add commit classifier to the new Analyzer --- persper/analytics/analyzer2.py | 17 ++++++++++--- persper/analytics/commit_classifier.py | 33 ++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 3 deletions(-) create mode 100644 persper/analytics/commit_classifier.py diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 4db0b058e67..6c32b321d5b 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py 
@@ -1,18 +1,20 @@ import asyncio from abc import ABC import collections.abc -from typing import Union, Set +from typing import Union, Set, Optional from git import Commit, Diff, DiffIndex, Repo from persper.analytics.git_tools import (diff_with_commit, get_contents) from persper.analytics.graph_server import CommitSeekingMode, GraphServer +from persper.analytics.commit_classifier import CommitClassifier class Analyzer: def __init__(self, repositoryRoot: str, graphServer: GraphServer, terminalCommit: str = "master", - firstParentOnly: bool = False): + firstParentOnly: bool = False, + commit_classifier: Optional[CommitClassifier] = None): self._repositoryRoot = repositoryRoot self._graphServer = graphServer self._repo = Repo(repositoryRoot) @@ -22,6 +24,8 @@ def __init__(self, repositoryRoot: str, graphServer: GraphServer, self._visitedCommits = set() self._s_visitedCommits = _ReadOnlySet(self._visitedCommits) self._observer: AnalyzerObserver = emptyAnalyzerObserver + self._commit_classifier = commit_classifier + self._clf_results: Dict[str, List[float]] = {} def __getstate__(self): state = self.__dict__.copy() @@ -97,10 +101,11 @@ def visitedCommits(self) -> Set[str]: return self._s_visitedCommits async def analyze(self, maxAnalyzedCommits=1000): - graphServerLastCommit:str = None + graphServerLastCommit: str = None commitSpec = self._terminalCommit if self._originCommit: commitSpec = self._originCommit.hexsha + ".." 
+ self._terminalCommit.hexsha + analyzedCommits = 0 for commit in self._repo.iter_commits(commitSpec, topo_order=True, reverse=True, first_parent=self._firstParentOnly): @@ -148,6 +153,7 @@ async def _analyzeCommit(self, commit: Union[Commit, str], parentCommit: Union[C """ if type(commit) != Commit: commit = self._repo.commit(commit) + self._observer.onBeforeCommit(self, commit, seekingMode) result = self._graphServer.start_commit(commit.hexsha, seekingMode, commit.author.name, commit.author.email, commit.message) @@ -155,6 +161,11 @@ async def _analyzeCommit(self, commit: Union[Commit, str], parentCommit: Union[C await result diff_index = diff_with_commit(self._repo, commit, parentCommit) + # commit classification + if self._commit_classifier and commit.hexsha not in self._clf_results: + prob = self._commit_classifier.predict(commit, diff_index) + self._clf_results[commit.hexsha] = prob + for diff in diff_index: old_fname, new_fname = _get_fnames(diff) # apply filter diff --git a/persper/analytics/commit_classifier.py b/persper/analytics/commit_classifier.py new file mode 100644 index 00000000000..126073cd267 --- /dev/null +++ b/persper/analytics/commit_classifier.py @@ -0,0 +1,33 @@ + +from git import Commit, DiffIndex + + +class CommitClassifier(ABC): + """ + Defines the interface of any commit classifier + """ + + @abstractmethod + def predict(self, commit: Commit, diff_index: DiffIndex): + """ + Args: + commit: A gitpython's Commit object. + diff_index: A gitpython's DiffIndex object. + It is a list of Diff object, each containing the + diff information between a pair of old/new source files. + + + Returns: + A list, representing the probability distribution of each label + """ + pass + + @property + @abstractmethod + def labels(self): + """ + Returns: + A list of label (str), + in the same order as `predict` method's output. 
+ """ + pass From 4cb4d29dfbb4fe21b157085fe85589dd5cb16853 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sun, 10 Mar 2019 18:12:36 -0700 Subject: [PATCH 76/84] Combine devranks with commit classification results --- persper/analytics/analyzer2.py | 12 +++++++++++ persper/analytics/call_commit_graph.py | 12 +---------- persper/util/score.py | 29 ++++++++++++++++++++++++++ 3 files changed, 42 insertions(+), 11 deletions(-) create mode 100644 persper/util/score.py diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 6c32b321d5b..c62bb6265a4 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -8,6 +8,7 @@ from persper.analytics.git_tools import (diff_with_commit, get_contents) from persper.analytics.graph_server import CommitSeekingMode, GraphServer from persper.analytics.commit_classifier import CommitClassifier +from persper.util.score import commit_overall_scores class Analyzer: @@ -100,6 +101,17 @@ def visitedCommits(self) -> Set[str]: """ return self._s_visitedCommits + def compute_commit_scores(self, alpha: float, label_weights: List[float], + top_one=False): + """ + Compute the overall scores for all commits by combining DevRank and + commit classification. 
+ """ + return commit_overall_scores(self.graph.commit_devranks(alpha), + self._clf_results, + label_weights, + top_one=top_one) + async def analyze(self, maxAnalyzedCommits=1000): graphServerLastCommit: str = None commitSpec = self._terminalCommit diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 05af148aec3..862035e4fba 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -6,17 +6,7 @@ import networkx as nx from networkx.readwrite import json_graph from persper.analytics.devrank import devrank - - -def normalize(devranks): - normalized_devranks = {} - dr_sum = 0 - for _, dr in devranks.items(): - dr_sum += dr - - for idx in devranks: - normalized_devranks[idx] = devranks[idx] / dr_sum - return normalized_devranks +from persper.util.score import normalize class CommitIdGenerators: diff --git a/persper/util/score.py b/persper/util/score.py new file mode 100644 index 00000000000..42dcb1686c6 --- /dev/null +++ b/persper/util/score.py @@ -0,0 +1,29 @@ +from typing import Dict, List +import numpy as np + + +def normalize(scores: Dict[str, float]) -> Dict[str, float]: + normalized_scores = {} + score_sum = 0 + for _, score in scores.items(): + score_sum += score + + for idx in scores: + normalized_scores[idx] = scores[idx] / score_sum + return normalized_scores + + +def commit_overall_scores(commit_devranks: Dict[str, float], + clf_results: Dict[str, List[float]], + label_weights: List[float], + top_one=False) -> Dict[str, float]: + overall_scores = {} + for sha, dr in commit_devranks.items(): + assert sha in clf_results, "Commit %s does not have label." 
+ if top_one: + top_idx = np.argmax(clf_results[sha]) + overall_scores[sha] = label_weights[top_idx] * dr + else: + overall_scores[sha] = np.dot(clf_results[sha], label_weights) * dr + + return normalize(overall_scores) From 80f4bb8718f261c1931f092970141c8fae4de3b7 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sun, 10 Mar 2019 22:37:38 -0700 Subject: [PATCH 77/84] Fix import error --- persper/analytics/analyzer2.py | 4 ++-- persper/analytics/call_commit_graph.py | 2 +- persper/analytics/commit_classifier.py | 2 +- persper/{util => analytics}/score.py | 0 4 files changed, 4 insertions(+), 4 deletions(-) rename persper/{util => analytics}/score.py (100%) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index c62bb6265a4..fcc4b9aa577 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -1,14 +1,14 @@ import asyncio from abc import ABC import collections.abc -from typing import Union, Set, Optional +from typing import Union, Set, Optional, List from git import Commit, Diff, DiffIndex, Repo from persper.analytics.git_tools import (diff_with_commit, get_contents) from persper.analytics.graph_server import CommitSeekingMode, GraphServer from persper.analytics.commit_classifier import CommitClassifier -from persper.util.score import commit_overall_scores +from persper.analytics.score import commit_overall_scores class Analyzer: diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 862035e4fba..f99ff124554 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -6,7 +6,7 @@ import networkx as nx from networkx.readwrite import json_graph from persper.analytics.devrank import devrank -from persper.util.score import normalize +from persper.analytics.score import normalize class CommitIdGenerators: diff --git a/persper/analytics/commit_classifier.py b/persper/analytics/commit_classifier.py index 126073cd267..7e3a5cb58f7 100644 
--- a/persper/analytics/commit_classifier.py +++ b/persper/analytics/commit_classifier.py @@ -1,4 +1,4 @@ - +from abc import ABC, abstractmethod from git import Commit, DiffIndex diff --git a/persper/util/score.py b/persper/analytics/score.py similarity index 100% rename from persper/util/score.py rename to persper/analytics/score.py From 746076749c549f9f3740c2eb047cb6d56e03ee4f Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sun, 10 Mar 2019 22:37:52 -0700 Subject: [PATCH 78/84] Add tests for commit_overall_score --- test/test_analytics/test_score.py | 39 +++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 test/test_analytics/test_score.py diff --git a/test/test_analytics/test_score.py b/test/test_analytics/test_score.py new file mode 100644 index 00000000000..427d2fa4d44 --- /dev/null +++ b/test/test_analytics/test_score.py @@ -0,0 +1,39 @@ +from persper.analytics.score import commit_overall_scores + + +def test_commit_overall_scores(): + # sums up to 1 + commit_devranks = { + 'abcdefg': 0.2, + 'bcdefgh': 0.3, + 'cdefghi': 0.4, + 'defghij': 0.1, + } + + # suppose a commit can be one of two types + clf_results = { + 'abcdefg': [0.3, 0.7], + 'bcdefgh': [0.9, 0.1], + 'cdefghi': [0.2, 0.8], + 'defghij': [0.6, 0.4], + } + + # the first type is twice as valuable as the second type + label_weights = [2, 1] + + score_truth = { + 'abcdefg': 0.17687074829931967, + 'bcdefgh': 0.3877551020408163, + 'cdefghi': 0.326530612244898, + 'defghij': 0.108843537414966 + } + + top_one_score_truth = { + 'abcdefg': 0.14285714285714285, + 'bcdefgh': 0.4285714285714285, + 'cdefghi': 0.2857142857142857, + 'defghij': 0.14285714285714285 + } + + assert score_truth == commit_overall_scores(commit_devranks, clf_results, label_weights) + assert top_one_score_truth == commit_overall_scores(commit_devranks, clf_results, label_weights, top_one=True) From 9ebba6b94c28e4ab0f64cd9c4ba4d88c5e3a2d04 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Sun, 10 Mar 2019 
22:45:50 -0700 Subject: [PATCH 79/84] Fix the usage of assert statement --- test/test_analytics/test_analyzer.py | 4 +- test/test_analytics/test_analyzer_go.py | 4 +- test/test_analytics/test_analyzer_go_1.py | 4 +- test/test_analytics/test_analyzer_go_2.py | 4 +- test/test_analytics/test_analyzer_js.py | 4 +- test/test_analytics/test_call_commit_graph.py | 50 +++++++++---------- test/test_analytics/test_detect_change.py | 18 +++---- test/test_analytics/test_devrank.py | 8 +-- test/test_analytics/test_diff.py | 2 +- test/test_analytics/test_inverse_diff.py | 2 +- test/test_analytics/test_iterator.py | 30 +++++------ test/test_analytics/test_srcml.py | 2 +- 12 files changed, 66 insertions(+), 66 deletions(-) diff --git a/test/test_analytics/test_analyzer.py b/test/test_analytics/test_analyzer.py index 07063fa65cc..fd011d463aa 100644 --- a/test/test_analytics/test_analyzer.py +++ b/test/test_analytics/test_analyzer.py @@ -97,7 +97,7 @@ async def test_analyzer_master_only(az): for cid, chist in history.items(): message = commits[cid]['message'] - assert(chist == history_truth[message.strip()][func]) + assert chist == history_truth[message.strip()][func] edges_truth = [ # Edges existing in final snapshot @@ -120,4 +120,4 @@ async def test_analyzer_master_only(az): ('str_append_chr', 'snprintf'), ('str_append', 'malloc') ] - assert(set(az._graph_server.get_graph().edges()) == set(edges_truth)) + assert set(az._graph_server.get_graph().edges()) == set(edges_truth) diff --git a/test/test_analytics/test_analyzer_go.py b/test/test_analytics/test_analyzer_go.py index 01e73e6fc19..8f114b205c5 100644 --- a/test/test_analytics/test_analyzer_go.py +++ b/test/test_analytics/test_analyzer_go.py @@ -64,7 +64,7 @@ def test_analzyer_go(az): history = data['history'] for cindex, csize in history.items(): commit_message = commits[int(cindex)]['message'] - assert(csize == history_truth[commit_message.strip()][func]) + assert csize == history_truth[commit_message.strip()][func] 
edges_added_by_A = set([ ('Abs', 'Sqrt'), @@ -93,4 +93,4 @@ def test_analzyer_go(az): print(set(az._graph_server.get_graph().edges())) all_edges = edges_added_by_A.union(edges_added_by_B).union(edges_added_by_C).union(edges_added_by_D) - assert(set(az._graph_server.get_graph().edges()) == all_edges) + assert set(az._graph_server.get_graph().edges()) == all_edges diff --git a/test/test_analytics/test_analyzer_go_1.py b/test/test_analytics/test_analyzer_go_1.py index ed9e0d5a840..d78dccf926d 100644 --- a/test/test_analytics/test_analyzer_go_1.py +++ b/test/test_analytics/test_analyzer_go_1.py @@ -56,7 +56,7 @@ def test_analzyer_go(az): history = data['history'] for cindex, csize in history.items(): commit_message = commits[int(cindex)]['message'] - assert(csize == history_truth[commit_message.strip()][func]) + assert csize == history_truth[commit_message.strip()][func] edges_added_by_A = set([ ('Abs', 'Sqrt'), @@ -75,4 +75,4 @@ def test_analzyer_go(az): all_edges = edges_added_by_A.union(edges_added_by_B) - assert(set(az._graph_server.get_graph().edges()) == all_edges) + assert set(az._graph_server.get_graph().edges()) == all_edges diff --git a/test/test_analytics/test_analyzer_go_2.py b/test/test_analytics/test_analyzer_go_2.py index 440fe37cca4..e2103a0c317 100644 --- a/test/test_analytics/test_analyzer_go_2.py +++ b/test/test_analytics/test_analyzer_go_2.py @@ -54,7 +54,7 @@ def test_analzyer_go(az): history = data['history'] for cindex, csize in history.items(): commit_message = commits[int(cindex)]['message'] - assert(csize == history_truth[commit_message.strip()][func]) + assert csize == history_truth[commit_message.strip()][func] edges_added_by_A = set([ ('main', 'printInfo'), @@ -72,4 +72,4 @@ def test_analzyer_go(az): ]) all_edges = edges_added_by_A.union(edges_added_by_B) - assert(set(az._graph_server.get_graph().edges()) == all_edges) + assert set(az._graph_server.get_graph().edges()) == all_edges diff --git a/test/test_analytics/test_analyzer_js.py 
b/test/test_analytics/test_analyzer_js.py index bb8592d5673..616cb093413 100644 --- a/test/test_analytics/test_analyzer_js.py +++ b/test/test_analytics/test_analyzer_js.py @@ -67,7 +67,7 @@ def test_az(az: Analyzer): history = data['history'] for cindex, csize in history.items(): commit_message = commits[cindex]['message'] - assert(csize == history_truth[commit_message.strip()][func]) + assert csize == history_truth[commit_message.strip()][func] edges_truth = [ ('main.js:funcB:9:12', 'Native:Window_prototype_print'), @@ -77,7 +77,7 @@ def test_az(az: Analyzer): ('main.js:main:7:16', 'main.js:funcA:3:5'), ('main.js:global', 'main.js:main:7:16') ] - assert(set(az.graph_server.get_graph().edges()) == set(edges_truth)) + assert set(az.graph_server.get_graph().edges()) == set(edges_truth) finally: p.terminate() diff --git a/test/test_analytics/test_call_commit_graph.py b/test/test_analytics/test_call_commit_graph.py index c61654b03fb..aeae9cca858 100644 --- a/test/test_analytics/test_call_commit_graph.py +++ b/test/test_analytics/test_call_commit_graph.py @@ -32,10 +32,10 @@ def test_call_commit_graph(): func_drs = ccgraph.function_devranks(0.85) commit_drs = ccgraph.commit_devranks(0.85) dev_drs = ccgraph.developer_devranks(0.85) - assert(isclose(func_drs['f1'], 0.35, rel_tol=1e-2)) - assert(isclose(func_drs['f2'], 0.65, rel_tol=1e-2)) - assert(isclose(commit_drs[first_commit['hexsha']], 1)) - assert(isclose(dev_drs[first_commit['authorEmail']], 1)) + assert isclose(func_drs['f1'], 0.35, rel_tol=1e-2) + assert isclose(func_drs['f2'], 0.65, rel_tol=1e-2) + assert isclose(commit_drs[first_commit['hexsha']], 1) + assert isclose(dev_drs[first_commit['authorEmail']], 1) second_commit = { 'hexsha': '0x02', @@ -54,13 +54,13 @@ def test_call_commit_graph(): func_drs2 = ccgraph.function_devranks(0.85) commit_drs2 = ccgraph.commit_devranks(0.85) dev_drs2 = ccgraph.developer_devranks(0.85) - assert(isclose(func_drs2['f1'], 0.26, rel_tol=1e-2)) - assert(isclose(func_drs2['f2'], 
0.37, rel_tol=1e-2)) - assert(isclose(func_drs2['f3'], 0.37, rel_tol=1e-2)) - assert(isclose(commit_drs2[first_commit['hexsha']], 0.63, rel_tol=1e-2)) - assert(isclose(commit_drs2[second_commit['hexsha']], 0.37, rel_tol=1e-2)) - assert(isclose(dev_drs2[first_commit['authorEmail']], 0.63, rel_tol=1e-2)) - assert(isclose(dev_drs2[second_commit['authorEmail']], 0.37, rel_tol=1e-2)) + assert isclose(func_drs2['f1'], 0.26, rel_tol=1e-2) + assert isclose(func_drs2['f2'], 0.37, rel_tol=1e-2) + assert isclose(func_drs2['f3'], 0.37, rel_tol=1e-2) + assert isclose(commit_drs2[first_commit['hexsha']], 0.63, rel_tol=1e-2) + assert isclose(commit_drs2[second_commit['hexsha']], 0.37, rel_tol=1e-2) + assert isclose(dev_drs2[first_commit['authorEmail']], 0.63, rel_tol=1e-2) + assert isclose(dev_drs2[second_commit['authorEmail']], 0.37, rel_tol=1e-2) third_commit = { 'hexsha': '0x03', @@ -83,16 +83,16 @@ def test_call_commit_graph(): func_drs3 = ccgraph.function_devranks(0.85) commit_drs3 = ccgraph.commit_devranks(0.85) dev_drs3 = ccgraph.developer_devranks(0.85) - assert(isclose(func_drs3['f1'], 0.141, rel_tol=1e-2)) - assert(isclose(func_drs3['f2'], 0.201, rel_tol=1e-2)) - assert(isclose(func_drs3['f3'], 0.201, rel_tol=1e-2)) - assert(isclose(func_drs3['f4'], 0.227, rel_tol=1e-2)) - assert(isclose(func_drs3['f5'], 0.227, rel_tol=1e-2)) - assert(isclose(commit_drs3[first_commit['hexsha']], 0.343, rel_tol=1e-2)) - assert(isclose(commit_drs3[second_commit['hexsha']], 0.201, rel_tol=1e-2)) - assert(isclose(commit_drs3[third_commit['hexsha']], 0.454, rel_tol=1e-2)) - assert(isclose(dev_drs3[first_commit['authorEmail']], 0.798, rel_tol=1e-2)) - assert(isclose(dev_drs3[second_commit['authorEmail']], 0.201, rel_tol=1e-2)) + assert isclose(func_drs3['f1'], 0.141, rel_tol=1e-2) + assert isclose(func_drs3['f2'], 0.201, rel_tol=1e-2) + assert isclose(func_drs3['f3'], 0.201, rel_tol=1e-2) + assert isclose(func_drs3['f4'], 0.227, rel_tol=1e-2) + assert isclose(func_drs3['f5'], 0.227, 
rel_tol=1e-2) + assert isclose(commit_drs3[first_commit['hexsha']], 0.343, rel_tol=1e-2) + assert isclose(commit_drs3[second_commit['hexsha']], 0.201, rel_tol=1e-2) + assert isclose(commit_drs3[third_commit['hexsha']], 0.454, rel_tol=1e-2) + assert isclose(dev_drs3[first_commit['authorEmail']], 0.798, rel_tol=1e-2) + assert isclose(dev_drs3[second_commit['authorEmail']], 0.201, rel_tol=1e-2) @pytest.mark.asyncio @@ -117,7 +117,7 @@ async def test_black_set(): ccgraph = az.get_graph() devdict = ccgraph.commit_devranks(0.85) devdict2 = ccgraph.commit_devranks(0.85, black_set=set([parent_sha])) - assert(len(devdict) == 3) - assert(len(devdict2) == 2) - assert(parent_sha in devdict) - assert(parent_sha not in devdict2) + assert len(devdict) == 3 + assert len(devdict2) == 2 + assert parent_sha in devdict + assert parent_sha not in devdict2 diff --git a/test/test_analytics/test_detect_change.py b/test/test_analytics/test_detect_change.py index 6409232d327..fc79d463ad5 100644 --- a/test/test_analytics/test_detect_change.py +++ b/test/test_analytics/test_detect_change.py @@ -37,15 +37,15 @@ def test_detect_change(): with open(os.path.join(dir_path, 'example.patch'), 'r') as f: example_patch = f.read() parsing_result = parser.parse(example_patch) - assert(parsing_result == parsing_truth) + assert parsing_result == parsing_truth with open(os.path.join(dir_path, 'example.cc'), 'r') as f: root = src_to_tree('example.cc', f.read()) func_ranges_result = get_func_ranges_cpp(root) - assert(func_ranges_result == func_ranges_truth) + assert func_ranges_result == func_ranges_truth - assert(changed_result == get_changed_functions( - *func_ranges_result, *parsing_result)) + assert changed_result == get_changed_functions( + *func_ranges_result, *parsing_result) def test_patch_parser(): @@ -58,7 +58,7 @@ def test_patch_parser(): with open(os.path.join(dir_path, 'example2.patch'), 'r') as f: example2_patch = f.read() parsing_result = parser.parse(example2_patch) - assert(parsing_result == 
patch2_truth) + assert parsing_result == patch2_truth # view patch3_truth here # https://github.com/UltimateBeaver/test_feature_branch/commit/caaac10f604ea7ac759c2147df8fb2b588ee2a27 @@ -69,7 +69,7 @@ def test_patch_parser(): with open(os.path.join(dir_path, 'example3.patch'), 'r') as f: example3_patch = f.read() parsing_result = parser.parse(example3_patch) - assert(parsing_result == patch3_truth) + assert parsing_result == patch3_truth # view patch4_truth here # https://github.com/UltimateBeaver/test_feature_branch/commit/364d5cc49aeb2e354da458924ce84c0ab731ac77 @@ -80,7 +80,7 @@ def test_patch_parser(): with open(os.path.join(dir_path, 'example4.patch'), 'r') as f: example4_patch = f.read() parsing_result = parser.parse(example4_patch) - assert(parsing_result == patch4_truth) + assert parsing_result == patch4_truth def test_no_newline_at_the_end_of_file(): @@ -91,7 +91,7 @@ def test_no_newline_at_the_end_of_file(): with open(os.path.join(dir_path, 'example5.patch'), 'r') as f: example5_patch = f.read() parsing_result = parser.parse(example5_patch) - assert(parsing_result == patch5_truth) + assert parsing_result == patch5_truth patch6_truth = ( [[17, 1], [20, 3], [30, 5]], @@ -100,4 +100,4 @@ def test_no_newline_at_the_end_of_file(): with open(os.path.join(dir_path, 'example6.patch'), 'r') as f: example6_patch = f.read() parsing_result = parser.parse(example6_patch) - assert(parsing_result == patch6_truth) + assert parsing_result == patch6_truth diff --git a/test/test_analytics/test_devrank.py b/test/test_analytics/test_devrank.py index 4bbede1e7cb..5e0a29e6460 100644 --- a/test/test_analytics/test_devrank.py +++ b/test/test_analytics/test_devrank.py @@ -8,18 +8,18 @@ def test_devrank(): G.add_node(2, weight=10) G.add_edge(1, 2) G.add_edge(2, 1) - assert(devrank(G, 'weight') == {1: 0.5, 2: 0.5}) + assert devrank(G, 'weight') == {1: 0.5, 2: 0.5} G2 = nx.DiGraph() G2.add_edges_from([(1, 2), (2, 3), (3, 4), (4, 1)]) for u in G2: G2.node[u]['weight'] = 10 - 
assert(devrank(G2, 'weight') == {1: 0.25, 2: 0.25, 3: 0.25, 4: 0.25}) + assert devrank(G2, 'weight') == {1: 0.25, 2: 0.25, 3: 0.25, 4: 0.25} G3 = nx.DiGraph() G3.add_edge(1, 2) for u in G3: G3.node[u]['weight'] = 10 dr = devrank(G3, 'weight', alpha=1.0) - assert(abs(dr[1] - 0.3333) < 0.0001) - assert(abs(dr[2] - 0.6666) < 0.0001) + assert abs(dr[1] - 0.3333) < 0.0001 + assert abs(dr[2] - 0.6666) < 0.0001 diff --git a/test/test_analytics/test_diff.py b/test/test_analytics/test_diff.py index 75446ce46f9..fc07cbb9708 100644 --- a/test/test_analytics/test_diff.py +++ b/test/test_analytics/test_diff.py @@ -19,4 +19,4 @@ def test_diff_ignore_space(): crlf_sha = '0a61b0df1224a5470bcddab302bc199ca5a9e356' crlf_commit = r.commit(crlf_sha) diff_result = diff_with_first_parent(r, crlf_commit) - assert(len(diff_result) == 0) + assert len(diff_result) == 0 diff --git a/test/test_analytics/test_inverse_diff.py b/test/test_analytics/test_inverse_diff.py index 883dd45ce6d..4b8da4cc1c2 100644 --- a/test/test_analytics/test_inverse_diff.py +++ b/test/test_analytics/test_inverse_diff.py @@ -15,4 +15,4 @@ def test_inverse_diff(): ) inv_result = inverse_diff(*adds_dels) - assert(inv_truth == inv_result) + assert inv_truth == inv_result diff --git a/test/test_analytics/test_iterator.py b/test/test_analytics/test_iterator.py index 79a576d6692..745b879c83e 100644 --- a/test/test_analytics/test_iterator.py +++ b/test/test_analytics/test_iterator.py @@ -29,37 +29,37 @@ def test_iterator(ri): commits, branch_commits = ri.iter(from_beginning=True, into_branches=True) # from A to L # use `git log --graph` to view ground truth - assert(len(ri.visited) == 12) - assert(len(commits) == 4) - assert(len(branch_commits) == 8) - assert(serialized_messages(commits) == 'D C B A') - assert(serialized_messages(branch_commits) == 'G F E J I H L K') + assert len(ri.visited) == 12 + assert len(commits) == 4 + assert len(branch_commits) == 8 + assert serialized_messages(commits) == 'D C B A' + assert 
serialized_messages(branch_commits) == 'G F E J I H L K' def test_continue_iter(ri): commits, branch_commits = ri.iter( from_beginning=True, num_commits=2, into_branches=True) - assert(serialized_messages(commits) == 'B A') - assert(serialized_messages(branch_commits) == '') + assert serialized_messages(commits) == 'B A' + assert serialized_messages(branch_commits) == '' commits2, branch_commits2 = ri.iter( continue_iter=True, num_commits=2, into_branches=True) - assert(serialized_messages(commits2) == 'D C') - assert(serialized_messages(branch_commits2) == 'G F E J I H L K') + assert serialized_messages(commits2) == 'D C' + assert serialized_messages(branch_commits2) == 'G F E J I H L K' def test_rev(ri): commits, branch_commits = ri.iter(rev='C', into_branches=True) - assert(serialized_messages(commits) == 'C B A') - assert(serialized_messages(branch_commits) == '') + assert serialized_messages(commits) == 'C B A' + assert serialized_messages(branch_commits) == '' commits2, branch_commits2 = ri.iter( continue_iter=True, end_commit_sha='D', into_branches=True) - assert(serialized_messages(commits2) == 'D') - assert(serialized_messages(branch_commits2) == 'G F E J I H L K') + assert serialized_messages(commits2) == 'D' + assert serialized_messages(branch_commits2) == 'G F E J I H L K' def test_iter_twice(ri): commits, branch_commits = ri.iter(from_beginning=True, into_branches=True) commits2, branch_commits2 = ri.iter( from_beginning=True, into_branches=True) - assert(commits == commits2) - assert(branch_commits == branch_commits2) + assert commits == commits2 + assert branch_commits == branch_commits2 diff --git a/test/test_analytics/test_srcml.py b/test/test_analytics/test_srcml.py index ac12016d9c7..e04fde8a426 100644 --- a/test/test_analytics/test_srcml.py +++ b/test/test_analytics/test_srcml.py @@ -9,4 +9,4 @@ def test_src_to_tree(): with open(full_path, 'r') as f: src = f.read() root = src_to_tree(filename, src) - assert(root.attrib['filename'] == filename) + 
assert root.attrib['filename'] == filename From aed6710b237c5359651a85107a84fc4c4b0bb778 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 11 Mar 2019 00:06:06 -0700 Subject: [PATCH 80/84] Set the default value of maxAnalyzedCommits to None --- persper/analytics/analyzer2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index fcc4b9aa577..c07290c4d75 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -112,7 +112,7 @@ def compute_commit_scores(self, alpha: float, label_weights: List[float], label_weights, top_one=top_one) - async def analyze(self, maxAnalyzedCommits=1000): + async def analyze(self, maxAnalyzedCommits=None): graphServerLastCommit: str = None commitSpec = self._terminalCommit if self._originCommit: From 87781b23f341d125f0fc7565272156d6c672de73 Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 11 Mar 2019 23:36:03 +0800 Subject: [PATCH 81/84] Add pickle test. Bug fix: analyzer2.Analyzer didn't populate _visitedCommits. --- persper/analytics/analyzer2.py | 7 ++++- test/test_analytics/test_analyzer_lsp_ccls.py | 26 +++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index 4db0b058e67..cbea197c685 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -39,6 +39,10 @@ def __setstate__(self, state): self.terminalCommit = state["_terminalCommit"] self._s_visitedCommits = _ReadOnlySet(self._visitedCommits) + @property + def graphServer(self): + return self._graphServer + @property def observer(self): """ @@ -97,7 +101,7 @@ def visitedCommits(self) -> Set[str]: return self._s_visitedCommits async def analyze(self, maxAnalyzedCommits=1000): - graphServerLastCommit:str = None + graphServerLastCommit: str = None commitSpec = self._terminalCommit if self._originCommit: commitSpec = self._originCommit.hexsha + ".." 
+ self._terminalCommit.hexsha @@ -138,6 +142,7 @@ def printCommitStatus(status: str): # then go on with current commit printCommitStatus("Going forward.") await self._analyzeCommit(commit, parent, CommitSeekingMode.NormalForward) + self._visitedCommits.add(commit.hexsha) graphServerLastCommit = commit.hexsha analyzedCommits += 1 diff --git a/test/test_analytics/test_analyzer_lsp_ccls.py b/test/test_analytics/test_analyzer_lsp_ccls.py index 420de70a369..33d17e06f32 100644 --- a/test/test_analytics/test_analyzer_lsp_ccls.py +++ b/test/test_analytics/test_analyzer_lsp_ccls.py @@ -1,6 +1,7 @@ import json import logging import os +import pickle import subprocess from pathlib import Path from tempfile import mkdtemp @@ -96,3 +97,28 @@ async def testCppTestRepo(): async with graphServer: analyzer.observer = createGraphDumpAnalyzerObserver("cpp_test_repo") await analyzer.analyze() + + +@pytest.mark.asyncio +async def testAnalyzerWithPickle(): + repoPath = prepareRepo("test_feature_branch") + graphServer = createCclsGraphServer() + analyzer = Analyzer(repoPath, graphServer) + pickleContent = None + async with graphServer: + analyzer.observer = createGraphDumpAnalyzerObserver( + "analyzer_pickling") + assert len(analyzer.visitedCommits) == 0 + await analyzer.analyze(2) + assert len(analyzer.visitedCommits) == 2 + await analyzer.analyze(2) + assert len(analyzer.visitedCommits) == 4 + pickleContent = pickle.dumps(analyzer) + + analyzer1: Analyzer = pickle.loads(pickleContent) + # Perhaps we need to set another temp folder for this. + graphServer1 = analyzer1.graphServer + analyzer1.observer = analyzer.observer + async with graphServer1: + assert analyzer1.visitedCommits == analyzer.visitedCommits + await analyzer1.analyze() From e87f8eec82d96a0086fbd909bb91239955cc0294 Mon Sep 17 00:00:00 2001 From: xinyan Date: Mon, 11 Mar 2019 23:46:46 +0800 Subject: [PATCH 82/84] Code cleanup. 
--- persper/analytics/analyzer2.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/persper/analytics/analyzer2.py b/persper/analytics/analyzer2.py index a2aef476bd8..c670ed74bd5 100644 --- a/persper/analytics/analyzer2.py +++ b/persper/analytics/analyzer2.py @@ -1,13 +1,13 @@ import asyncio -from abc import ABC import collections.abc -from typing import Union, Set, Optional, List +from abc import ABC +from typing import List, Optional, Set, Union from git import Commit, Diff, DiffIndex, Repo -from persper.analytics.git_tools import (diff_with_commit, get_contents) -from persper.analytics.graph_server import CommitSeekingMode, GraphServer from persper.analytics.commit_classifier import CommitClassifier +from persper.analytics.git_tools import diff_with_commit, get_contents +from persper.analytics.graph_server import CommitSeekingMode, GraphServer from persper.analytics.score import commit_overall_scores From 2c9bd1875dcdd934f691db730632198a7d7374df Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 11 Mar 2019 13:51:48 -0700 Subject: [PATCH 83/84] Encapsulate files attribute of function nodes --- persper/analytics/call_commit_graph.py | 11 ++++++++--- persper/analytics/call_graph/c.py | 6 ++++-- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index 3b2627b53f5..ec5a3a2b3b0 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -7,6 +7,7 @@ from networkx.readwrite import json_graph from persper.analytics.devrank import devrank from persper.analytics.score import normalize +from typing import Union, Set, List, Dict class CommitIdGenerators: @@ -59,6 +60,10 @@ def commits(self): # https://networkx.github.io/documentation/stable/tutorial.html#graph-attributes return self._digraph.graph['commits'] + def files(self, node: str) -> Set[str]: + """Provide read-only access to `files` attribute of a node""" + 
return self.nodes()[node]['files'] + def __contains__(self, node): """Implement membership check""" return node in self._digraph @@ -82,7 +87,7 @@ def _next_cindex(self): return self._cur_cindex() + 1 # TODO: remove the default value of files - def add_node(self, node, files=[]): + def add_node(self, node: str, files: Union[Set[str], List[str]] = []): self._digraph.add_node(node, size=None, history={}, files=set(files)) # add_node must be called on source and target first @@ -106,10 +111,10 @@ def update_node_history(self, node, num_adds, num_dels): node_history[self._current_commit_id] = {'adds': num_adds, 'dels': num_dels} # read/write access to node history are thourgh this function - def _get_node_history(self, node): + def _get_node_history(self, node: str) -> Dict[str, Dict[str, int]]: return self._digraph.nodes[node]['history'] - def update_node_files(self, node, new_files): + def update_node_files(self, node: str, new_files: Union[Set[str], List[str]]): self._digraph.nodes[node]['files'] = set(new_files) # TODO: provide other options for computing a node's size diff --git a/persper/analytics/call_graph/c.py b/persper/analytics/call_graph/c.py index 961f147d755..18d84449ede 100644 --- a/persper/analytics/call_graph/c.py +++ b/persper/analytics/call_graph/c.py @@ -1,4 +1,5 @@ from persper.analytics.call_graph.utils import ns, line_attr +from typing import Set class NotFunctionCallError(Exception): @@ -83,9 +84,10 @@ def update_graph(ccgraph, ast_list, change_stats): if caller_name not in ccgraph: ccgraph.add_node(caller_name, [filename]) else: - files = ccgraph.nodes()[caller_name]['files'] + files: Set[str] = ccgraph.files(caller_name) if filename not in files: - ccgraph.update_node_files(caller_name, files + [filename]) + files.add(filename) + ccgraph.update_node_files(caller_name, files) for call in function.xpath('.//srcml:call', namespaces=ns): try: From 8e1e55bed9fd5dfbc4b87e54ec96d75a0cb11ba2 Mon Sep 17 00:00:00 2001 From: Hezheng Yin Date: Mon, 11 Mar 
2019 16:11:42 -0700 Subject: [PATCH 84/84] Fix transforming graph_data to networkx's node link format --- persper/analytics/call_commit_graph.py | 17 +++++++++++++---- persper/analytics/go.py | 5 +---- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/persper/analytics/call_commit_graph.py b/persper/analytics/call_commit_graph.py index ec5a3a2b3b0..6d3db3bb0f1 100644 --- a/persper/analytics/call_commit_graph.py +++ b/persper/analytics/call_commit_graph.py @@ -7,7 +7,7 @@ from networkx.readwrite import json_graph from persper.analytics.devrank import devrank from persper.analytics.score import normalize -from typing import Union, Set, List, Dict +from typing import Union, Set, List, Dict, Optional class CommitIdGenerators: @@ -30,14 +30,23 @@ class CallCommitGraph: and edit histories across commits. """ - def __init__(self, node_link_data=None, commit_id_generator=CommitIdGenerators.fromHexsha): - if node_link_data: - self._digraph = json_graph.node_link_graph(node_link_data) + def __init__(self, graph_data: Optional[Dict] = None, commit_id_generator=CommitIdGenerators.fromHexsha): + if graph_data: + self._digraph = json_graph.node_link_graph( + CallCommitGraph._to_networkx_format(graph_data)) else: self._digraph = self._new_graph() self._commit_id_generator = commit_id_generator self._current_commit_id = None + @staticmethod + def _to_networkx_format(graph_data: Dict) -> Dict: + graph_data['multigraph'] = False + graph_data['directed'] = True + for node in graph_data['nodes']: + node['files'] = set(node['files']) + return graph_data + def reset(self): """Reset all internal states""" self._digraph = self._new_graph() diff --git a/persper/analytics/go.py b/persper/analytics/go.py index 284ab5cf6d0..e4a39fb178d 100644 --- a/persper/analytics/go.py +++ b/persper/analytics/go.py @@ -40,10 +40,7 @@ def update_graph(self, old_filename, old_src, def get_graph(self): graph_url = self.server_addr + '/callgraph' r = requests.get(graph_url) - graph_data = r.json() 
- graph_data['directed'] = True - graph_data['multigraph'] = False - return CallCommitGraph(graph_data) + return CallCommitGraph(graph_data=r.json()) def reset_graph(self): reset_url = urllib.parse.urljoin(self.server_addr, '/reset')