"""
HITL-KG Core Module

Human-in-the-Loop Knowledge Graph Reasoning System.

Components:
    - KnowledgeGraph: Core graph with entities and reasoning nodes
    - ReasoningEngine: LLM and local reasoning generators
    - EmbeddingService: Multilingual semantic search
    - SessionManager: Per-user session handling
    - DatasetLoader: Generic ontology loading

Usage:
    from src.core import load_knowledge_graph, create_engine, LLMProvider, GenerationConfig

    kg = load_knowledge_graph()
    engine = create_engine(LLMProvider.LOCAL, kg)
    context = engine.build_context("fever and cough")
    for node in engine.generate(context, GenerationConfig()):
        print(node.content)
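
Semantic search (a sketch only: get_embedding_service is exported by this
package, but the search() method name and its top_k parameter shown here
are assumptions, not confirmed by this module):

    from src.core import get_embedding_service

    service = get_embedding_service()
    # Hypothetical call: match a query (here Spanish for "fever and cough")
    # against indexed entities and print each SearchResult.
    for result in service.search("fiebre y tos", top_k=5):
        print(result)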
"""
# Configuration
from .config import (
AppConfig,
DatasetConfig,
EmbeddingConfig,
LLMConfig,
get_config,
load_config,
)
# Knowledge Graph
from .knowledge_graph import (
KnowledgeGraph,
ReasoningNode,
ReasoningEdge,
Entity,
NodeType,
EdgeType,
EntityCategory,
NODE_TYPE_INFO,
create_node_id,
)
# LLM Engine
from .llm_engine import (
ReasoningEngine,
OpenAIEngine,
LocalEngine,
GraphSynchronizer,
GenerationConfig,
ReasoningContext,
ReasoningChainCache,
LLMProvider,
create_engine,
detect_language,
LANGUAGE_NAMES,
)
# Dataset Loading
from .dataset_loader import (
DatasetLoader,
OntologyTerm,
load_knowledge_graph,
build_knowledge_graph,
)
# Embedding Service
from .embedding_service import (
EmbeddingService,
SearchResult,
get_embedding_service,
)
# Session Management
from .session_manager import (
Session,
ChatMessage,
UserInteraction,
SessionManager,
get_session_manager,
generate_session_id,
)
__all__ = [
# Config
"AppConfig",
"DatasetConfig",
"EmbeddingConfig",
"LLMConfig",
"get_config",
"load_config",
# Knowledge Graph
"KnowledgeGraph",
"ReasoningNode",
"ReasoningEdge",
"Entity",
"NodeType",
"EdgeType",
"EntityCategory",
"NODE_TYPE_INFO",
"create_node_id",
# LLM Engine
"ReasoningEngine",
"OpenAIEngine",
"LocalEngine",
"GraphSynchronizer",
"GenerationConfig",
"ReasoningContext",
"ReasoningChainCache",
"LLMProvider",
"create_engine",
"detect_language",
"LANGUAGE_NAMES",
# Dataset
"DatasetLoader",
"OntologyTerm",
"load_knowledge_graph",
"build_knowledge_graph",
# Embeddings
"EmbeddingService",
"SearchResult",
"get_embedding_service",
# Sessions
"Session",
"ChatMessage",
"UserInteraction",
"SessionManager",
"get_session_manager",
"generate_session_id",
]