[ { "@graph" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs", "@type" : [ "http://www.nanopub.org/nschema#Nanopublication" ], "http://www.nanopub.org/nschema#hasAssertion" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/assertion" } ], "http://www.nanopub.org/nschema#hasProvenance" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/provenance" } ], "http://www.nanopub.org/nschema#hasPublicationInfo" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/pubinfo" } ] } ], "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/Head" }, { "@graph" : [ { "@id" : "https://doi.org/10.48550/arXiv.2510.17934", "@type" : [ "http://www.w3.org/ns/prov#Entity" ], "http://purl.org/dc/terms/title" : [ { "@value" : "ATLASKV: AUGMENTING LLMS WITH BILLION-SCALE KNOWLEDGE GRAPHS IN 20GB VRAM" } ], "http://purl.org/spar/cito/describes" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#AtlasKV" }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#HiKVP" }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#KG2KV" } ], "http://purl.org/spar/cito/discusses" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#InContextLearning" }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#KBLaM" }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#RetrievalAugmentedGeneration" }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#ZeroShotLearning" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#AtlasKV", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://purl.org/dc/terms/subject" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/categories#KGEnhancedLLMInference" } ], "http://www.w3.org/2000/01/rdf-schema#comment" : [ { "@value" : "AtlasKV is a parametric knowledge integration method that augments LLMs with billion-scale KGs. \nIt is designed to overcome limitations of existing methods by improving LLM performance in terms of knowledge grounding, generalization, and scalability during the inference stage, without requiring external retrievers or retraining." } ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "AtlasKV" } ], "https://neverblink.eu/ontologies/llm-kg/hasTopCategory" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/top-categories#KGEnhancedLLM" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#HiKVP", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://purl.org/dc/terms/subject" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/categories#KGEnhancedLLMInference" } ], "http://www.w3.org/2000/01/rdf-schema#comment" : [ { "@value" : "HiKVP (Hierarchical Key-Value Pruning) is an algorithm that dramatically reduces computational and memory overhead during LLM inference by hierarchically clustering and pruning KGKVs. It maintains high knowledge grounding accuracy while enabling scalable integration of billion-scale KGs into LLMs at inference time." 
} ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "HiKVP" } ], "https://neverblink.eu/ontologies/llm-kg/hasTopCategory" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/top-categories#KGEnhancedLLM" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#InContextLearning", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "In-context learning" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#KBLaM", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "KBLaM" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#KG2KV", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://purl.org/dc/terms/subject" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/categories#KGEnhancedLLMPretraining" } ], "http://www.w3.org/2000/01/rdf-schema#comment" : [ { "@value" : "KG2KV is a pipeline that transforms KG triples into high-quality Q-K-V data, which serves as training data for LLMs. This method enhances the generalization performance and efficient knowledge integration by enabling better injection of KGs into LLMs' parametric representations, thus improving their knowledge expression." 
} ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "KG2KV" } ], "https://neverblink.eu/ontologies/llm-kg/hasTopCategory" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/top-categories#KGEnhancedLLM" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#RetrievalAugmentedGeneration", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "Retrieval-Augmented Generation" } ] }, { "@id" : "https://neverblink.eu/ontologies/llm-kg/methods#ZeroShotLearning", "@type" : [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "Zero-shot learning" } ] } ], "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/assertion" }, { "@graph" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/assertion", "http://www.w3.org/ns/prov#wasAttributedTo" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/agent" } ], "http://www.w3.org/ns/prov#wasDerivedFrom" : [ { "@id" : "https://doi.org/10.48550/arXiv.2510.17934" } ] } ], "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/provenance" }, { "@graph" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs", "http://purl.org/dc/terms/created" : [ { "@type" : "http://www.w3.org/2001/XMLSchema#dateTime", "@value" : "2026-02-26T16:25:34.999Z" } ], "http://purl.org/dc/terms/creator" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/agent" } ], "http://purl.org/nanopub/x/hasNanopubType" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/PaperAssessmentResult" } ], "http://www.w3.org/2000/01/rdf-schema#label" : [ { "@value" : "LLM-KG assessment for paper 10.48550/arXiv.2510.17934" } ] }, { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/sig", "http://purl.org/nanopub/x/hasAlgorithm" : [ { "@value" : "RSA" } ], "http://purl.org/nanopub/x/hasPublicKey" : [ { "@value" : 
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwNz2QK3SEifno78S7+48zUB0xpTex3mAzW73ZimHqNcdEMU5/apslrGrTHGFAt/Chocgo++r6JQp5ygY7NyJHGWdaIqnt85pjX4PbNfLAvapyUO00qZP34fY61w4eZ9UMtleWEsmZKRtQPyJ8ODl46i/rfPuZlcJGpM9Nmy5mpGWuepqIEvF4a/t7pLVeCEDFSYXT+yaiygt6ynIK5f7TtEDhZpeUf/Q74WhMPJXm4yTU/hqOX4IW+50kWHNArGGZwUaXwzyG6M3Zd6UMModryGkLqS4H/MSE3ZA1Ylnms7BfWLEXhMWlaKi6HRV4nGRDLhxVSi9LSRi3LWKLhNIIQIDAQAB" } ], "http://purl.org/nanopub/x/hasSignature" : [ { "@value" : "kI8bqlYDgCuCaY9kYe5Ri7LxFScfd38o3qzlVJ2Hrl3u3GxHKcVsTgwktXxMpDAxCPmymjiBiu5Oz788n+bV+y9PzFaK5HZHgSX/xma78Fm3cVElSwVxKQCuGgv/o96v0WbP4BmlJIukJN/pKvV3Wodd0YKwXty2zKymN9PKjIUNLZ7zsePmEGi9Cbi8JWTV1QJodzHXyaDvuwsIOY19p6PiiCSlY1sQVIFk0X7FZDGXO30oD6IzxCiD0Jdgj1s/pPrc7vDVxPc5Y6VSN4VG5S/I9+nAJ6MEqtzndBzBoQ+MN0r43MAPqgnqYJy3Et2Q4KnjHquCRj8ffK0/l45GHw==" } ], "http://purl.org/nanopub/x/hasSignatureTarget" : [ { "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs" } ], "http://purl.org/nanopub/x/signedBy" : [ { "@id" : "https://neverblink.eu/ontologies/llm-kg/agent" } ] } ], "@id" : "https://w3id.org/np/RAaGcIMVdjU39we9S6HyZATb-2KUg6GD1QQrFmMRlcSxs/pubinfo" } ]