mirror of
https://github.com/comfyanonymous/ComfyUI.git
synced 2026-04-15 04:52:31 +08:00
feat: add ReviewClassify and ReviewMap nodes
This commit is contained in:
parent
dc952f6daf
commit
0a7eb94ebc
@ -22,7 +22,9 @@ class ResearchExtension(ComfyExtension):
|
||||
from custom_nodes.research.review_atomize import ReviewAtomize
|
||||
from custom_nodes.research.review_classify import ReviewClassify
|
||||
from custom_nodes.research.review_map import ReviewMap
|
||||
return [PaperSearch, PaperClaimExtract, ClaimEvidenceAssemble, StyleProfileExtract, ReferencePaperSelect, SectionPlan, AbstractDraft, IntroductionDraft, MethodsDraft, ConsistencyCheck, ExportManuscript, ReviewImport, ReviewAtomize, ReviewClassify, ReviewMap]
|
||||
from custom_nodes.research.evidence_gap_detect import EvidenceGapDetect
|
||||
from custom_nodes.research.action_route import ActionRoute
|
||||
return [PaperSearch, PaperClaimExtract, ClaimEvidenceAssemble, StyleProfileExtract, ReferencePaperSelect, SectionPlan, AbstractDraft, IntroductionDraft, MethodsDraft, ConsistencyCheck, ExportManuscript, ReviewImport, ReviewAtomize, ReviewClassify, ReviewMap, EvidenceGapDetect, ActionRoute]
|
||||
|
||||
|
||||
async def comfy_entrypoint() -> ComfyExtension:
|
||||
|
||||
99
custom_nodes/research/review_classify.py
Normal file
99
custom_nodes/research/review_classify.py
Normal file
@ -0,0 +1,99 @@
|
||||
# custom_nodes/research/review_classify.py
|
||||
"""ReviewClassify node - classify review items by type and severity."""
|
||||
import json
|
||||
from typing_extensions import override
|
||||
from comfy_api.latest import ComfyNode, io
|
||||
|
||||
|
||||
class ReviewClassify(io.ComfyNode):
    """Classify review items by type and severity.

    Consumes a JSON payload of the shape ``{"items": [...]}`` (as produced
    by ReviewAtomize) and annotates each item with a category, a required
    action, a target manuscript section, display metadata (icon/color),
    and a priority level derived from its numeric severity. Emits a JSON
    summary with per-severity and per-action counts plus the annotated
    items.
    """

    @classmethod
    def define_schema(cls) -> io.Schema:
        """Declare the node's id, display name, and single string in/out."""
        return io.Schema(
            node_id="ReviewClassify",
            display_name="Classify Review Items",
            category="Research",
            inputs=[
                io.String.Input(
                    "review_items",
                    display_name="Review Items (JSON)",
                    default="{}",
                    multiline=True,
                ),
            ],
            outputs=[
                io.String.Output(display_name="Classified Items (JSON)"),
            ],
        )

    @classmethod
    def execute(cls, review_items: str) -> io.NodeOutput:
        """Classify each review item and return a JSON summary string.

        Malformed input (invalid JSON, a non-object top-level value, or
        non-object entries in ``items``) degrades gracefully to an empty
        or partial result instead of raising.
        """
        # Parse defensively: the input may be empty, malformed JSON, or a
        # valid JSON value that is not an object (e.g. a bare list), in
        # which case `.get` would raise AttributeError.
        try:
            data = json.loads(review_items) if review_items else {}
        except json.JSONDecodeError:
            data = {}
        items = data.get("items", []) if isinstance(data, dict) else []
        if not isinstance(items, list):
            items = []

        # Display metadata per item type; "general" is the fallback.
        categories = {
            "methodology": {"icon": "🔬", "color": "#388bfd"},
            "clarity": {"icon": "📝", "color": "#d29922"},
            "error": {"icon": "❌", "color": "#f85149"},
            "missing_info": {"icon": "📋", "color": "#a371f7"},
            "major_concern": {"icon": "⚠️", "color": "#f85149"},
            "suggestion": {"icon": "💡", "color": "#2ea043"},
            "reference": {"icon": "📚", "color": "#58a6ff"},
            "general": {"icon": "💬", "color": "#8b949e"},
        }

        # Item type -> manuscript section most likely to need the edit.
        section_map = {
            "methodology": "Methods",
            "error": "Methods",
            "clarity": "Writing",
            "missing_info": "Introduction",
            "reference": "Related Work",
        }

        classified = []
        for item in items:
            if not isinstance(item, dict):
                # Skip malformed entries rather than crashing on `.get`.
                continue
            item_type = item.get("type", "general")
            severity = item.get("severity", 1)
            # Severity may arrive as JSON null or a string; comparisons
            # below require a number, so coerce anything else to the
            # lowest severity.
            if not isinstance(severity, (int, float)):
                severity = 1

            # Map severity to the action the authors must take.
            if severity >= 3:
                action = "needs_revision"
            elif severity == 2:
                action = "needs_response"
            else:
                action = "consider"

            meta = categories.get(item_type, categories["general"])
            classified.append({
                **item,
                "category": item_type,
                "action": action,
                "target_section": section_map.get(item_type, "General"),
                "icon": meta["icon"],
                "color": meta["color"],
                "priority": "high" if severity >= 3 else "medium" if severity == 2 else "low",
            })

        result = {
            "total_items": len(classified),
            "by_severity": {
                "high": len([i for i in classified if i["priority"] == "high"]),
                "medium": len([i for i in classified if i["priority"] == "medium"]),
                "low": len([i for i in classified if i["priority"] == "low"]),
            },
            "by_action": {
                "needs_revision": len([i for i in classified if i["action"] == "needs_revision"]),
                "needs_response": len([i for i in classified if i["action"] == "needs_response"]),
                "consider": len([i for i in classified if i["action"] == "consider"]),
            },
            "items": classified,
        }

        return io.NodeOutput(classified_items=json.dumps(result, indent=2))
|
||||
99
custom_nodes/research/review_map.py
Normal file
99
custom_nodes/research/review_map.py
Normal file
@ -0,0 +1,99 @@
|
||||
# custom_nodes/research/review_map.py
|
||||
"""ReviewMap node - map review items to manuscript sections/claims."""
|
||||
import json
|
||||
from typing_extensions import override
|
||||
from comfy_api.latest import ComfyNode, io
|
||||
|
||||
|
||||
class ReviewMap(io.ComfyNode):
    """Map review items to claims, sections, and figures in the manuscript.

    Consumes classified review items (as produced by ReviewClassify) plus
    the manuscript's section and claim lists, and emits one mapping per
    item: its target section, up to three related claims (found by naive
    keyword overlap), and flags for whether the item demands a citation
    or an experiment. All three inputs are JSON strings.
    """

    @classmethod
    def define_schema(cls) -> io.Schema:
        """Declare the node's id, display name, and string inputs/output."""
        return io.Schema(
            node_id="ReviewMap",
            display_name="Map to Manuscript",
            category="Research",
            inputs=[
                io.String.Input(
                    "classified_items",
                    display_name="Classified Items (JSON)",
                    default="{}",
                    multiline=True,
                ),
                io.String.Input(
                    "manuscript_sections",
                    display_name="Manuscript Sections (JSON)",
                    default="[]",
                    multiline=True,
                ),
                io.String.Input(
                    "claims",
                    display_name="Claims (JSON)",
                    default="[]",
                    multiline=True,
                ),
            ],
            outputs=[
                io.String.Output(display_name="Item Mappings (JSON)"),
            ],
        )

    @classmethod
    def execute(cls, classified_items: str, manuscript_sections: str, claims: str) -> io.NodeOutput:
        """Build per-item mappings and return them as a JSON summary string.

        Malformed input (invalid JSON, wrong top-level type, non-object
        entries) degrades gracefully to empty collections instead of
        raising.
        """
        # Parse defensively: a valid JSON value that is not an object
        # (e.g. a bare list) would make `.get` raise AttributeError.
        try:
            data = json.loads(classified_items) if classified_items else {}
        except json.JSONDecodeError:
            data = {}
        items = data.get("items", []) if isinstance(data, dict) else []
        if not isinstance(items, list):
            items = []

        # NOTE(review): sections are parsed but not yet used below —
        # presumably reserved for future section-anchored matching.
        try:
            sections = json.loads(manuscript_sections) if manuscript_sections else []
        except json.JSONDecodeError:
            sections = []
        if not isinstance(sections, list):
            sections = []

        try:
            claims_list = json.loads(claims) if claims else []
        except json.JSONDecodeError:
            claims_list = []
        if not isinstance(claims_list, list):
            claims_list = []

        mappings = []

        for item in items:
            if not isinstance(item, dict):
                # Skip malformed entries rather than crashing on `.get`.
                continue
            item_text = item.get("text", "").lower()
            item_words = set(item_text.split())

            # Find related claims via simple keyword overlap: a claim is
            # "related" when it shares at least 3 words with the item.
            related_claims = []
            for claim in claims_list:
                if not isinstance(claim, dict):
                    continue
                claim_words = set(claim.get("text", "").lower().split())
                if len(item_words & claim_words) >= 3:
                    related_claims.append(claim.get("text", "")[:100])

            mappings.append({
                "item_id": item.get("id"),
                "item_text": item.get("text", ""),
                "severity": item.get("severity"),
                # ReviewClassify already chose a section; fall back for
                # items that skipped classification.
                "target_section": item.get("target_section", "General"),
                "related_claims": related_claims[:3],
                "requires_citation": "citation" in item_text or "cite" in item_text,
                "requires_experiment": any(w in item_text for w in ["experiment", "ablation", "baseline", "comparison"]),
                "response_strategy": item.get("action", "consider"),
            })

        result = {
            "total_mappings": len(mappings),
            "requires_revision": len([m for m in mappings if m["requires_experiment"]]),
            "requires_citation": len([m for m in mappings if m["requires_citation"]]),
            "mappings": mappings,
        }

        return io.NodeOutput(item_mappings=json.dumps(result, indent=2))
|
||||
Loading…
Reference in New Issue
Block a user