Skip to content

Fix Tests #42

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 6 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/python-app.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,6 @@ jobs:
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
# flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

# - name: Test with pytest
# run: |
# pytest
- name: Test with pytest
run: |
pytest
28 changes: 14 additions & 14 deletions api/analyzers/python/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def process_class_definition(self, node: Node, path: Path) -> tuple[Class, list[
# Extract class name
class_name = node.child_by_field_name("name")
class_name = class_name.text.decode("utf8")
logger.info(f"Class declaration: {class_name}")
logger.info("Class declaration: %s", class_name)

# Extract docstring
docstring_node = None
Expand All @@ -58,7 +58,7 @@ def process_class_definition(self, node: Node, path: Path) -> tuple[Class, list[
docstring_node = body_node.child(0).child(0)

docstring = docstring_node.text.decode('utf-8') if docstring_node else None
logger.debug(f"Class docstring: {docstring}")
logger.debug("Class docstring: %s", docstring)

# Extract inherited classes
inherited_classes_node = node.child_by_field_name('superclasses')
Expand All @@ -67,7 +67,7 @@ def process_class_definition(self, node: Node, path: Path) -> tuple[Class, list[
for child in inherited_classes_node.children:
if child.type == 'identifier':
inherited_classes.append(child.text.decode('utf-8'))
logger.debug(f"Class inherited classes: {inherited_classes}")
logger.debug("Class inherited classes: %s", inherited_classes)

# Create Class object
c = Class(str(path), class_name, docstring,
Expand Down Expand Up @@ -145,7 +145,7 @@ def process_function_definition(self, node: Node, path: Path, source_code: str)
arg_type = arg_type_node.text.decode('utf-8')

else:
logger.debug(f'Unknown function parameter node type: {param.type}')
logger.debug('Unknown function parameter node type: %s', param.type)
continue

args.append((arg_name, arg_type))
Expand Down Expand Up @@ -222,10 +222,10 @@ def first_pass(self, path: Path, f: io.TextIOWrapper, graph:Graph) -> None:
"""

if path.suffix != '.py':
logger.debug(f"Skipping none Python file {path}")
logger.debug("Skipping none Python file %s", path)
return

logger.info(f"Python Processing {path}")
logger.info("Python Processing %s", path)

# Create file entity
file = File(os.path.dirname(path), path.name, path.suffix)
Expand All @@ -237,7 +237,7 @@ def first_pass(self, path: Path, f: io.TextIOWrapper, graph:Graph) -> None:
try:
source_code = source_code.decode('utf-8')
except Exception as e:
logger.error(f"Failed decoding source code: {e}")
logger.error("Failed decoding source code: %s", e)
source_code = ''

# Walk through the AST
Expand Down Expand Up @@ -281,7 +281,7 @@ def process_function_call(self, node) -> Optional[str]:
logger.warning("Unknown function call pattern")
return None

logger.debug(f"callee_name: {callee_name}")
logger.debug("callee_name: %s", callee_name)
return callee_name

def process_call_node(self, caller: Union[Function, File], callee_name: str,
Expand All @@ -306,7 +306,7 @@ def process_call_node(self, caller: Union[Function, File], callee_name: str,
# Create Function callee_name
# Assuming this is a call to either a native or imported Function
# Although this call might just be a Class instantiation.
logger.info(f"Creating missing Class/Function {callee_name}")
logger.info("Creating missing Class/Function %s", callee_name)
callee = Function('/', callee_name, None, None, None,0, 0)
graph.add_function(callee)

Expand All @@ -316,7 +316,7 @@ def process_call_node(self, caller: Union[Function, File], callee_name: str,
def process_inheritance(self, cls: Class, super_classes: list[str],
graph: Graph) -> None:
for super_class in super_classes:
logger.info(f"Class {cls.name} inherits {super_class}")
logger.info("Class %s inherits %s", cls.name, super_class)

# Try to get Class object from graph
_super_class = graph.get_class_by_name(super_class)
Expand Down Expand Up @@ -370,15 +370,15 @@ def second_pass(self, path: Path, f: io.TextIOWrapper, graph: Graph) -> None:
"""

if path.suffix != '.py':
logger.debug(f"Skipping none Python file {path}")
logger.debug("Skipping none Python file %s", path)
return

logger.info(f"Processing {path}")
logger.info("Processing %s", path)

# Get file entity
file = graph.get_file(os.path.dirname(path), path.name, path.suffix)
if file is None:
logger.error(f"File entity not found for: {path}")
logger.error("File entity not found for: %s", path)
return

try:
Expand All @@ -389,4 +389,4 @@ def second_pass(self, path: Path, f: io.TextIOWrapper, graph: Graph) -> None:
# Walk through the AST
self.second_pass_traverse(file, tree.root_node, path, graph, source_code)
except Exception as e:
logger.error(f"Failed to process file {path}: {e}")
logger.error("Failed to process file %s: %s", path, e)
23 changes: 11 additions & 12 deletions api/analyzers/source_analyzer.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import os
import shutil
import concurrent.futures
import logging

from pathlib import Path
from typing import Optional, List
Expand All @@ -9,7 +9,6 @@
from .c.analyzer import CAnalyzer
from .python.analyzer import PythonAnalyzer

import logging
# Configure logging
logging.basicConfig(level=logging.DEBUG, format='%(filename)s - %(asctime)s - %(levelname)s - %(message)s')

Expand Down Expand Up @@ -41,11 +40,11 @@ def first_pass(self, ignore: List[str], executor: concurrent.futures.Executor) -
if dirpath in ignore:
# in-place clear dirnames to prevent os.walk from recursing into
# any of the nested directories
logging.info(f'ignoring directory: {dirpath}')
logging.info('ignoring directory: %s', dirpath)
dirnames[:] = []
continue

logging.info(f'Processing directory: {dirpath}')
logging.info('Processing directory: %s', dirpath)

# Process each file in the current directory
for filename in filenames:
Expand All @@ -54,10 +53,10 @@ def first_pass(self, ignore: List[str], executor: concurrent.futures.Executor) -
# Skip non-supported files
ext = file_path.suffix
if ext not in analyzers:
logging.info(f"Skipping none supported file {file_path}")
logging.info("Skipping none supported file %s", file_path)
continue

logging.info(f'Processing file: {file_path}')
logging.info('Processing file: %s', file_path)

def process_file(path: Path) -> None:
with open(path, 'rb') as f:
Expand Down Expand Up @@ -88,11 +87,11 @@ def second_pass(self, ignore: List[str], executor: concurrent.futures.Executor)
if dirpath in ignore:
# in-place clear dirnames to prevent os.walk from recursing into
# any of the nested directories
logging.info(f'ignoring directory: {dirpath}')
logging.info('ignoring directory: %s', dirpath)
dirnames[:] = []
continue

logging.info(f'Processing directory: {dirpath}')
logging.info('Processing directory: %s', dirpath)

# Process each file in the current directory
for filename in filenames:
Expand All @@ -103,7 +102,7 @@ def second_pass(self, ignore: List[str], executor: concurrent.futures.Executor)
if ext not in analyzers:
continue

logging.info(f'Processing file: {file_path}')
logging.info('Processing file: %s', file_path)

def process_file(path: Path) -> None:
with open(path, 'rb') as f:
Expand All @@ -118,8 +117,8 @@ def process_file(path: Path) -> None:

def analyze_file(self, path: Path, graph: Graph) -> None:
ext = path.suffix
logging.info(f"analyze_file: path: {path}")
logging.info(f"analyze_file: ext: {ext}")
logging.info("analyze_file: path: %s", path)
logging.info("analyze_file: ext: %s", ext)
if ext not in analyzers:
return

Expand All @@ -144,7 +143,7 @@ def analyze_local_folder(self, path: str, g: Graph, ignore: Optional[List[str]]
ignore (List(str)): List of paths to skip
"""

logging.info(f"Analyzing local folder {path}")
logging.info("Analyzing local folder %s", path)

# Save original working directory for later restore
original_dir = Path.cwd()
Expand Down
2 changes: 1 addition & 1 deletion api/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def get_repos() -> List[str]:
password=os.getenv('FALKORDB_PASSWORD', None))

graphs = db.list_graphs()
graphs = [g for g in graphs if not g.endswith('_git')]
graphs = [g for g in graphs if not (g.endswith('_git') or g.endswith('_schema'))]
return graphs

class Graph():
Expand Down
8 changes: 3 additions & 5 deletions api/llm.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
import logging

from graphrag_sdk.models.openai import OpenAiGenerativeModel
from graphrag_sdk.models.litellm import LiteModel
from graphrag_sdk import (
Ontology,
Entity,
Expand Down Expand Up @@ -199,15 +199,13 @@ def _define_ontology() -> Ontology:
def _create_kg_agent(repo_name: str):
global ontology

openapi_model = OpenAiGenerativeModel("gpt-4o")
#gemini_model = GeminiGenerativeModel("gemini-1.5-flash-001")
#gemini_model_pro = GeminiGenerativeModel("gemini-1.5-pro")
model = LiteModel(model_name="gemini/gemini-2.0-flash-exp")

#ontology = _define_ontology()
code_graph_kg = KnowledgeGraph(
name=repo_name,
ontology=ontology,
model_config=KnowledgeGraphModelConfig.with_model(openapi_model),
model_config=KnowledgeGraphModelConfig.with_model(model),
host=os.getenv('FALKORDB_HOST', 'localhost'),
port=os.getenv('FALKORDB_PORT', 6379),
username=os.getenv('FALKORDB_USERNAME', None),
Expand Down
Loading
Loading