diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..c44fac1 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Scala Steward: Reformat with scalafmt 3.7.17 +dc0d4368f9742bdc645162c29f5d1fdddb065f60 diff --git a/.scalafmt.conf b/.scalafmt.conf index 4243104..4fa335a 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version = "3.7.15" +version = "3.7.17" style = defaultWithAlign maxColumn = 120 runner.dialect = scala212source3 diff --git a/modules/shacl/src/main/scala/es/weso/shacl/Component.scala b/modules/shacl/src/main/scala/es/weso/shacl/Component.scala index bfb1f3f..8405321 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/Component.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/Component.scala @@ -1,96 +1,96 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.{IRI, Literal, RDFNode} - -sealed abstract class Component { - val name: String -} - -case class ClassComponent(value: RDFNode) extends Component { - override val name: String = "class" -} -case class Datatype(value: IRI) extends Component { - override val name: String = "datatype" -} -case class NodeKind(value: NodeKindType) extends Component { - override val name: String = "nodeKind" -} -case class MinCount(value: Int) extends Component { - override val name: String = "minCount" -} -case class MaxCount(value: Int) extends Component { - override val name: String = "maxCount" -} -case class MinExclusive(value: Literal) extends Component { - override val name: String = "minExclusive" -} -case class MinInclusive(value: Literal) extends Component { - override val name: String = "minInclusive" -} -case class MaxExclusive(value: Literal) extends Component { - override val name: String = "maxExclusive" -} -case class MaxInclusive(value: Literal) extends Component { - override val name: String = "maxInclusive" -} -case class MinLength(value: Int) extends Component { - override val name: String = "minLength" -} -case class 
MaxLength(value: Int) extends Component { - override val name: String = "maxLength" -} -case class Pattern(pattern: String, flags: Option[String]) extends Component { - override val name: String = "pattern" -} -case class UniqueLang(value: Boolean) extends Component { - override val name: String = "uniqueLang" -} -case class LanguageIn(langs: List[String]) extends Component { - override val name: String = "languageIn" -} -case class Equals(p: IRI) extends Component { - override val name: String = "equals" -} -case class Disjoint(p: IRI) extends Component { - override val name: String = "disjoint" -} -case class LessThan(p: IRI) extends Component { - override val name: String = "lessThan" -} -case class LessThanOrEquals(p: IRI) extends Component { - override val name: String = "lessThanOrEquals" -} -case class Or(shapes: List[RefNode]) extends Component { - override val name: String = "or" -} -case class And(shapes: List[RefNode]) extends Component { - override val name: String = "and" -} -case class Not(shape: RefNode) extends Component { - override val name: String = "not" -} -case class Xone(shapes: List[RefNode]) extends Component { - override val name: String = "xone" -} -case class Closed(isClosed: Boolean, ignoredProperties: List[IRI]) extends Component { - override val name: String = "closed" -} -case class NodeComponent(shape: RefNode) extends Component { - override val name: String = "node" -} -case class HasValue(value: Value) extends Component { - override val name: String = "hasValue" -} -case class In(list: List[Value]) extends Component { - override val name: String = "in" -} - -// TODO: Change representation to include optional parent shape -case class QualifiedValueShape( - shape: RefNode, - qualifiedMinCount: Option[Int], - qualifiedMaxCount: Option[Int], - qualifiedValueShapesDisjoint: Option[Boolean]) extends Component { - override val name: String = "qualifiedValueShape" -} - +package es.weso.shacl + +import es.weso.rdf.nodes.{IRI, Literal, 
RDFNode} + +sealed abstract class Component { + val name: String +} + +case class ClassComponent(value: RDFNode) extends Component { + override val name: String = "class" +} +case class Datatype(value: IRI) extends Component { + override val name: String = "datatype" +} +case class NodeKind(value: NodeKindType) extends Component { + override val name: String = "nodeKind" +} +case class MinCount(value: Int) extends Component { + override val name: String = "minCount" +} +case class MaxCount(value: Int) extends Component { + override val name: String = "maxCount" +} +case class MinExclusive(value: Literal) extends Component { + override val name: String = "minExclusive" +} +case class MinInclusive(value: Literal) extends Component { + override val name: String = "minInclusive" +} +case class MaxExclusive(value: Literal) extends Component { + override val name: String = "maxExclusive" +} +case class MaxInclusive(value: Literal) extends Component { + override val name: String = "maxInclusive" +} +case class MinLength(value: Int) extends Component { + override val name: String = "minLength" +} +case class MaxLength(value: Int) extends Component { + override val name: String = "maxLength" +} +case class Pattern(pattern: String, flags: Option[String]) extends Component { + override val name: String = "pattern" +} +case class UniqueLang(value: Boolean) extends Component { + override val name: String = "uniqueLang" +} +case class LanguageIn(langs: List[String]) extends Component { + override val name: String = "languageIn" +} +case class Equals(p: IRI) extends Component { + override val name: String = "equals" +} +case class Disjoint(p: IRI) extends Component { + override val name: String = "disjoint" +} +case class LessThan(p: IRI) extends Component { + override val name: String = "lessThan" +} +case class LessThanOrEquals(p: IRI) extends Component { + override val name: String = "lessThanOrEquals" +} +case class Or(shapes: List[RefNode]) extends Component { + override val 
name: String = "or" +} +case class And(shapes: List[RefNode]) extends Component { + override val name: String = "and" +} +case class Not(shape: RefNode) extends Component { + override val name: String = "not" +} +case class Xone(shapes: List[RefNode]) extends Component { + override val name: String = "xone" +} +case class Closed(isClosed: Boolean, ignoredProperties: List[IRI]) extends Component { + override val name: String = "closed" +} +case class NodeComponent(shape: RefNode) extends Component { + override val name: String = "node" +} +case class HasValue(value: Value) extends Component { + override val name: String = "hasValue" +} +case class In(list: List[Value]) extends Component { + override val name: String = "in" +} + +// TODO: Change representation to include optional parent shape +case class QualifiedValueShape( + shape: RefNode, + qualifiedMinCount: Option[Int], + qualifiedMaxCount: Option[Int], + qualifiedValueShapesDisjoint: Option[Boolean] +) extends Component { + override val name: String = "qualifiedValueShape" +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/MessageMap.scala b/modules/shacl/src/main/scala/es/weso/shacl/MessageMap.scala index 36d6fe1..a31d0e0 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/MessageMap.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/MessageMap.scala @@ -1,67 +1,70 @@ -package es.weso.shacl -import cats._ -import cats.implicits._ -import es.weso.rdf.nodes.{Lang, LangLiteral, RDFNode, StringLiteral} - -case class MessageMap(mmap: Map[Option[Lang], String]) { - - def getRDFNodes: List[RDFNode] = mmap.toList.map { - case (maybeLang, str) => - maybeLang match { - case None => StringLiteral(str) - case Some(lang) => LangLiteral(str, lang) - } - } - - def addMessage(node: RDFNode): Either[String, MessageMap] = node match { - case StringLiteral(str) => mmap.get(None) match { - case None => Right(MessageMap(mmap.updated(None, str))) - case Some(other) => Left(s"Trying to create two messages without 
language tag: $other and $str") - } - case LangLiteral(str,lang) => mmap.get(Some(lang)) match { - case None => Right(MessageMap(mmap.updated(Some(lang), str))) - case Some(other) => Left(s"Trying to create two messages with same language tag ($lang): $other and $str") - } - case _ => Left(s"Node $node must be a string or a language tagged string to be a message") - } - - override def toString(): String = Show[MessageMap].show(this) - -} - -object MessageMap { - - def fromString(msg: String): MessageMap = { - MessageMap(Map(None -> msg)) - } - - def fromRDFNodes(nodes: List[RDFNode]): Either[String, MessageMap] = { - val zero: Either[String,MessageMap] = Right(MessageMap(Map())) - def cmb(rest: Either[String,MessageMap], x: RDFNode): Either[String,MessageMap] = for { - mmap <- rest - r <- mmap.addMessage(x) - } yield r - nodes.foldLeft(zero)(cmb) - } - - implicit def monoidMessageMap: Monoid[MessageMap] = new Monoid[MessageMap] { - override def empty: MessageMap = MessageMap(Map()) - - override def combine(m1: MessageMap, m2: MessageMap): MessageMap = - MessageMap(m1.mmap |+| m2.mmap) - } - - implicit def showMessageMap: Show[MessageMap] = new Show[MessageMap] { - override def show(m: MessageMap): String = { - m.mmap.toList.map { case (maybeLang,msg) => - maybeLang match { - case None => msg - case Some(lang) => s"$msg@$lang" - } - }.mkString(",") - } - } - - def empty: MessageMap = Monoid[MessageMap].empty - -} \ No newline at end of file +package es.weso.shacl +import cats._ +import cats.implicits._ +import es.weso.rdf.nodes.{Lang, LangLiteral, RDFNode, StringLiteral} + +case class MessageMap(mmap: Map[Option[Lang], String]) { + + def getRDFNodes: List[RDFNode] = mmap.toList.map { case (maybeLang, str) => + maybeLang match { + case None => StringLiteral(str) + case Some(lang) => LangLiteral(str, lang) + } + } + + def addMessage(node: RDFNode): Either[String, MessageMap] = node match { + case StringLiteral(str) => + mmap.get(None) match { + case None => 
Right(MessageMap(mmap.updated(None, str))) + case Some(other) => Left(s"Trying to create two messages without language tag: $other and $str") + } + case LangLiteral(str, lang) => + mmap.get(Some(lang)) match { + case None => Right(MessageMap(mmap.updated(Some(lang), str))) + case Some(other) => Left(s"Trying to create two messages with same language tag ($lang): $other and $str") + } + case _ => Left(s"Node $node must be a string or a language tagged string to be a message") + } + + override def toString(): String = Show[MessageMap].show(this) + +} + +object MessageMap { + + def fromString(msg: String): MessageMap = { + MessageMap(Map(None -> msg)) + } + + def fromRDFNodes(nodes: List[RDFNode]): Either[String, MessageMap] = { + val zero: Either[String, MessageMap] = Right(MessageMap(Map())) + def cmb(rest: Either[String, MessageMap], x: RDFNode): Either[String, MessageMap] = for { + mmap <- rest + r <- mmap.addMessage(x) + } yield r + nodes.foldLeft(zero)(cmb) + } + + implicit def monoidMessageMap: Monoid[MessageMap] = new Monoid[MessageMap] { + override def empty: MessageMap = MessageMap(Map()) + + override def combine(m1: MessageMap, m2: MessageMap): MessageMap = + MessageMap(m1.mmap |+| m2.mmap) + } + + implicit def showMessageMap: Show[MessageMap] = new Show[MessageMap] { + override def show(m: MessageMap): String = { + m.mmap.toList + .map { case (maybeLang, msg) => + maybeLang match { + case None => msg + case Some(lang) => s"$msg@$lang" + } + } + .mkString(",") + } + } + + def empty: MessageMap = Monoid[MessageMap].empty + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/NodeKindType.scala b/modules/shacl/src/main/scala/es/weso/shacl/NodeKindType.scala index f3ed145..5ebf385 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/NodeKindType.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/NodeKindType.scala @@ -1,26 +1,26 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.IRI -import es.weso.shacl.SHACLPrefixes._ - -sealed trait 
NodeKindType { - def id: IRI -} -case object IRIKind extends NodeKindType { - override def id = `sh:IRI` -} -case object LiteralKind extends NodeKindType { - override def id = `sh:Literal` -} -case object BlankNodeKind extends NodeKindType { - override def id = `sh:BlankNode` -} -case object BlankNodeOrIRI extends NodeKindType { - override def id = `sh:BlankNodeOrIRI` -} -case object BlankNodeOrLiteral extends NodeKindType { - override def id = `sh:BlankNodeOrLiteral` -} -case object IRIOrLiteral extends NodeKindType { - override def id = `sh:IRIOrLiteral` -} +package es.weso.shacl + +import es.weso.rdf.nodes.IRI +import es.weso.shacl.SHACLPrefixes._ + +sealed trait NodeKindType { + def id: IRI +} +case object IRIKind extends NodeKindType { + override def id = `sh:IRI` +} +case object LiteralKind extends NodeKindType { + override def id = `sh:Literal` +} +case object BlankNodeKind extends NodeKindType { + override def id = `sh:BlankNode` +} +case object BlankNodeOrIRI extends NodeKindType { + override def id = `sh:BlankNodeOrIRI` +} +case object BlankNodeOrLiteral extends NodeKindType { + override def id = `sh:BlankNodeOrLiteral` +} +case object IRIOrLiteral extends NodeKindType { + override def id = `sh:IRIOrLiteral` +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/PropertyGroup.scala b/modules/shacl/src/main/scala/es/weso/shacl/PropertyGroup.scala index 13457ec..480bd18 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/PropertyGroup.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/PropertyGroup.scala @@ -1,7 +1,7 @@ -package es.weso.shacl -import es.weso.rdf.nodes.{DecimalLiteral, RDFNode} - -case class PropertyGroup( - order: Option[DecimalLiteral], - label: Set[RDFNode] - ) +package es.weso.shacl +import es.weso.rdf.nodes.{DecimalLiteral, RDFNode} + +case class PropertyGroup( + order: Option[DecimalLiteral], + label: Set[RDFNode] +) diff --git a/modules/shacl/src/main/scala/es/weso/shacl/RefNode.scala 
b/modules/shacl/src/main/scala/es/weso/shacl/RefNode.scala index b371767..4c482bb 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/RefNode.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/RefNode.scala @@ -1,7 +1,7 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.RDFNode - -case class RefNode(id: RDFNode) extends AnyVal { - def showId = id.toString -} +package es.weso.shacl + +import es.weso.rdf.nodes.RDFNode + +case class RefNode(id: RDFNode) extends AnyVal { + def showId = id.toString +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/SHACLPrefixes.scala b/modules/shacl/src/main/scala/es/weso/shacl/SHACLPrefixes.scala index ebaf934..3ee7ad0 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/SHACLPrefixes.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/SHACLPrefixes.scala @@ -1,97 +1,98 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.IRI -import es.weso.rdf.PrefixMap -import es.weso.rdf.PREFIXES._ -import es.weso.rdf._ - -object SHACLPrefixes { - -// lazy val sh = IRI("http://www.w3.org/ns/shacl#") - - lazy val `sh:BlankNode`: IRI = sh + "BlankNode" - lazy val `sh:BlankNodeOrIRI`: IRI = sh + "BlankNodeOrIRI" - lazy val `sh:BlankNodeOrLiteral`: IRI = sh + "BlankNodeOrLiteral" - lazy val `sh:Info`: IRI = sh + "Info" - lazy val `sh:IRI`: IRI = sh + "IRI" - lazy val `sh:IRIOrLiteral`: IRI = sh + "IRIOrLiteral" - lazy val `sh:Literal`: IRI = sh + "Literal" - lazy val `sh:NodeShape`: IRI = sh + "NodeShape" - lazy val `sh:PropertyShape`: IRI = sh + "PropertyShape" - lazy val `sh:Shape`: IRI = sh + "Shape" - lazy val `sh:Schema`: IRI = sh + "Schema" - lazy val `sh:ValidationReport`: IRI = sh + "ValidationReport" - lazy val `sh:ValidationResult`: IRI = sh + "ValidationResult" - lazy val `sh:Violation`: IRI = sh + "Violation" - lazy val `sh:Warning`: IRI = sh + "Warning" - - lazy val `sh:and`: IRI = sh + "and" - lazy val `sh:class`: IRI = sh + "class" - lazy val `sh:closed`: IRI = sh + "closed" - lazy val `sh:conforms`: IRI = sh + 
"conforms" - lazy val `sh:datatype`: IRI = sh + "datatype" - lazy val `sh:deactivated`: IRI = sh + "deactivated" - lazy val `sh:description`: IRI = sh + "description" - lazy val `sh:disjoint`: IRI = sh + "disjoint" - lazy val `sh:equals`: IRI = sh + "equals" - lazy val `sh:entailment`: IRI = sh + "entailment" - lazy val `sh:flags`: IRI = sh + "flags" - lazy val `sh:focusNode`: IRI = sh + "focusNode" - lazy val `sh:group`: IRI = sh + "group" - lazy val `sh:hasValue`: IRI = sh + "hasValue" - lazy val `sh:ignoredProperties`: IRI = sh + "ignoredProperties" - lazy val `sh:in`: IRI = sh + "in" - lazy val `sh:languageIn`: IRI = sh + "languageIn" - lazy val `sh:lessThan`: IRI = sh + "lessThan" - lazy val `sh:lessThanOrEquals`: IRI = sh + "lessThanOrEquals" - lazy val `sh:minCount`: IRI = sh + "minCount" - lazy val `sh:maxCount`: IRI = sh + "maxCount" - lazy val `sh:minInclusive`: IRI = sh + "minInclusive" - lazy val `sh:minExclusive`: IRI = sh + "minExclusive" - lazy val `sh:maxInclusive`: IRI = sh + "maxInclusive" - lazy val `sh:maxExclusive`: IRI = sh + "maxExclusive" - lazy val `sh:minLength`: IRI = sh + "minLength" - lazy val `sh:maxLength`: IRI = sh + "maxLength" - lazy val `sh:message`: IRI = sh + "message" - lazy val `sh:name`: IRI = sh + "name" - lazy val `sh:nodeKind`: IRI = sh + "nodeKind" - lazy val `sh:node`: IRI = sh + "node" - lazy val `sh:not`: IRI = sh + "not" - lazy val `sh:or`: IRI = sh + "or" - lazy val `sh:order`: IRI = sh + "order" - lazy val `sh:path`: IRI = sh + "path" - lazy val `sh:pattern`: IRI = sh + "pattern" - lazy val `sh:property`: IRI = sh + "property" - lazy val `sh:qualifiedMinCount`: IRI = sh + "qualifiedMinCount" - lazy val `sh:qualifiedMaxCount`: IRI = sh + "qualifiedMaxCount" - lazy val `sh:qualifiedValueShape`: IRI = sh + "qualifiedValueShape" - lazy val `sh:qualifiedValueShapesDisjoint`: IRI = sh + "qualifiedValueShapesDisjoint" - lazy val `sh:result`: IRI = sh + "result" - lazy val `sh:resultPath`: IRI = sh + "resultPath" - lazy val 
`sh:resultSeverity`: IRI = sh + "resultSeverity" - lazy val `sh:resultMessage`: IRI = sh + "resultMessage" - lazy val `sh:shapesGraph`: IRI = sh + "shapesGraph" - lazy val `sh:severity`: IRI = sh + "severity" - lazy val `sh:sourceConstraintComponent`: IRI = sh + "sourceConstraintComponent" - lazy val `sh:sourceShape`: IRI = sh + "sourceShape" - lazy val `sh:value`: IRI = sh + "value" - lazy val `sh:targetNode`: IRI = sh + "targetNode" - lazy val `sh:targetClass`: IRI = sh + "targetClass" - lazy val `sh:targetSubjectsOf`: IRI = sh + "targetSubjectsOf" - lazy val `sh:targetObjectsOf`: IRI = sh + "targetObjectsOf" - lazy val `sh:text`: IRI = sh + "text" - lazy val `sh:uniqueLang`: IRI = sh + "uniqueLang" - lazy val `sh:xone`: IRI = sh + "xone" - - lazy val `owl:imports`: IRI = owl + "imports" - - lazy val defaultPrefixMap = PrefixMap( - Map( - Prefix("sh") -> sh, - Prefix("rdf") -> rdf, - Prefix("xsd") -> xsd, - Prefix("rdfs") -> rdfs, - Prefix("owl") -> owl - )) - -} +package es.weso.shacl + +import es.weso.rdf.nodes.IRI +import es.weso.rdf.PrefixMap +import es.weso.rdf.PREFIXES._ +import es.weso.rdf._ + +object SHACLPrefixes { + +// lazy val sh = IRI("http://www.w3.org/ns/shacl#") + + lazy val `sh:BlankNode`: IRI = sh + "BlankNode" + lazy val `sh:BlankNodeOrIRI`: IRI = sh + "BlankNodeOrIRI" + lazy val `sh:BlankNodeOrLiteral`: IRI = sh + "BlankNodeOrLiteral" + lazy val `sh:Info`: IRI = sh + "Info" + lazy val `sh:IRI`: IRI = sh + "IRI" + lazy val `sh:IRIOrLiteral`: IRI = sh + "IRIOrLiteral" + lazy val `sh:Literal`: IRI = sh + "Literal" + lazy val `sh:NodeShape`: IRI = sh + "NodeShape" + lazy val `sh:PropertyShape`: IRI = sh + "PropertyShape" + lazy val `sh:Shape`: IRI = sh + "Shape" + lazy val `sh:Schema`: IRI = sh + "Schema" + lazy val `sh:ValidationReport`: IRI = sh + "ValidationReport" + lazy val `sh:ValidationResult`: IRI = sh + "ValidationResult" + lazy val `sh:Violation`: IRI = sh + "Violation" + lazy val `sh:Warning`: IRI = sh + "Warning" + + lazy val `sh:and`: 
IRI = sh + "and" + lazy val `sh:class`: IRI = sh + "class" + lazy val `sh:closed`: IRI = sh + "closed" + lazy val `sh:conforms`: IRI = sh + "conforms" + lazy val `sh:datatype`: IRI = sh + "datatype" + lazy val `sh:deactivated`: IRI = sh + "deactivated" + lazy val `sh:description`: IRI = sh + "description" + lazy val `sh:disjoint`: IRI = sh + "disjoint" + lazy val `sh:equals`: IRI = sh + "equals" + lazy val `sh:entailment`: IRI = sh + "entailment" + lazy val `sh:flags`: IRI = sh + "flags" + lazy val `sh:focusNode`: IRI = sh + "focusNode" + lazy val `sh:group`: IRI = sh + "group" + lazy val `sh:hasValue`: IRI = sh + "hasValue" + lazy val `sh:ignoredProperties`: IRI = sh + "ignoredProperties" + lazy val `sh:in`: IRI = sh + "in" + lazy val `sh:languageIn`: IRI = sh + "languageIn" + lazy val `sh:lessThan`: IRI = sh + "lessThan" + lazy val `sh:lessThanOrEquals`: IRI = sh + "lessThanOrEquals" + lazy val `sh:minCount`: IRI = sh + "minCount" + lazy val `sh:maxCount`: IRI = sh + "maxCount" + lazy val `sh:minInclusive`: IRI = sh + "minInclusive" + lazy val `sh:minExclusive`: IRI = sh + "minExclusive" + lazy val `sh:maxInclusive`: IRI = sh + "maxInclusive" + lazy val `sh:maxExclusive`: IRI = sh + "maxExclusive" + lazy val `sh:minLength`: IRI = sh + "minLength" + lazy val `sh:maxLength`: IRI = sh + "maxLength" + lazy val `sh:message`: IRI = sh + "message" + lazy val `sh:name`: IRI = sh + "name" + lazy val `sh:nodeKind`: IRI = sh + "nodeKind" + lazy val `sh:node`: IRI = sh + "node" + lazy val `sh:not`: IRI = sh + "not" + lazy val `sh:or`: IRI = sh + "or" + lazy val `sh:order`: IRI = sh + "order" + lazy val `sh:path`: IRI = sh + "path" + lazy val `sh:pattern`: IRI = sh + "pattern" + lazy val `sh:property`: IRI = sh + "property" + lazy val `sh:qualifiedMinCount`: IRI = sh + "qualifiedMinCount" + lazy val `sh:qualifiedMaxCount`: IRI = sh + "qualifiedMaxCount" + lazy val `sh:qualifiedValueShape`: IRI = sh + "qualifiedValueShape" + lazy val `sh:qualifiedValueShapesDisjoint`: IRI = sh 
+ "qualifiedValueShapesDisjoint" + lazy val `sh:result`: IRI = sh + "result" + lazy val `sh:resultPath`: IRI = sh + "resultPath" + lazy val `sh:resultSeverity`: IRI = sh + "resultSeverity" + lazy val `sh:resultMessage`: IRI = sh + "resultMessage" + lazy val `sh:shapesGraph`: IRI = sh + "shapesGraph" + lazy val `sh:severity`: IRI = sh + "severity" + lazy val `sh:sourceConstraintComponent`: IRI = sh + "sourceConstraintComponent" + lazy val `sh:sourceShape`: IRI = sh + "sourceShape" + lazy val `sh:value`: IRI = sh + "value" + lazy val `sh:targetNode`: IRI = sh + "targetNode" + lazy val `sh:targetClass`: IRI = sh + "targetClass" + lazy val `sh:targetSubjectsOf`: IRI = sh + "targetSubjectsOf" + lazy val `sh:targetObjectsOf`: IRI = sh + "targetObjectsOf" + lazy val `sh:text`: IRI = sh + "text" + lazy val `sh:uniqueLang`: IRI = sh + "uniqueLang" + lazy val `sh:xone`: IRI = sh + "xone" + + lazy val `owl:imports`: IRI = owl + "imports" + + lazy val defaultPrefixMap = PrefixMap( + Map( + Prefix("sh") -> sh, + Prefix("rdf") -> rdf, + Prefix("xsd") -> xsd, + Prefix("rdfs") -> rdfs, + Prefix("owl") -> owl + ) + ) + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/Schema.scala b/modules/shacl/src/main/scala/es/weso/shacl/Schema.scala index 99bbe0f..12ffb44 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/Schema.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/Schema.scala @@ -1,125 +1,123 @@ -package es.weso.shacl - -import es.weso.rdf.{PrefixMap, RDFBuilder} -import es.weso.rdf.nodes.{IRI, RDFNode} -import es.weso.shacl.converter.Shacl2RDF - -import scala.util.{Either, Left, Right} -// import sext._ -import cats.effect.IO - -case class Schema(pm: PrefixMap, - imports: List[IRI], - entailments: List[IRI], - shapesMap: Map[RefNode, Shape], - propertyGroups: Map[RefNode, PropertyGroup] - ) { - - lazy val shapes: Seq[Shape] = - shapesMap.toSeq.map(_._2) - - lazy val shapeRefs: Seq[RefNode] = - shapesMap.keys.toSeq - - /** - * Get the shape associated to an 
IRI - * @param node IRI that identifies a shape - */ - def shape(node: RDFNode): Either[String, Shape] = - shapesMap.get(RefNode(node)) match { - case None => Left(s"Not found $node in Schema") - case Some(shape) => Right(shape) - } - - private[shacl] def siblingQualifiedShapes(s: RefNode): List[RefNode] = { - val parentShapes: List[Shape] = - parents(s). - map(shapesMap.get(_)). - collect { case Some(shape) => shape } - val qualifiedPropertyShapes = - parentShapes. - flatMap(_.propertyShapes). - filter(_ != s) - collectQualifiedValueShapes(qualifiedPropertyShapes) - } - - private def collectQualifiedValueShapes(ls: List[RefNode]): List[RefNode] = { - val zero: List[RefNode] = List() - def comb(xs: List[RefNode], x: RefNode): List[RefNode] = - qualifiedShapes(x) ++ xs - ls.foldLeft(zero)(comb) - } - - private def qualifiedShapes(p: RefNode): List[RefNode] = shapesMap.get(p) match { - case None => List() - case Some(shape) => - shape.components.collect { case x: QualifiedValueShape => x.shape }.toList - } - - /* Find shape x such that x sh:property p - */ - private[shacl] def parents(p: RefNode): List[RefNode] = { - shapesWithPropertyShape(this.shapeRefs, p) - } - - private def shapesWithPropertyShape(ls: Seq[RefNode], p: RefNode): List[RefNode] = { - ls.filter(hasPropertyShape(_, p)).toList - } - - private def hasPropertyShape(s: RefNode, p: RefNode): Boolean = { - shapesMap.get(s) match { - case None => false // TODO: Maybe raise an error - case Some(shape) => - if (shape.propertyShapes.contains(p)) true - else false - } - } - - /** - * Get the sequence of sh:targetNode declarations - */ - def targetNodeShapes: Seq[(RDFNode, Shape)] = { - val zero: Seq[(RDFNode, Shape)] = Seq() - def comb(rs: Seq[(RDFNode, Shape)], s: Shape): Seq[(RDFNode, Shape)] = { - val ns: Seq[RDFNode] = s.targetNodes - ns.map(n => (n, s)) ++ rs - } - shapes.foldLeft(zero)(comb) - } - - /** - * Get the sequence of `sh:targetNode` declarations - * @return a list of pairs (n,s) where n is the 
IRI of a node - * and s is the IRI of a shape - */ - def targetNodeDeclarations: Seq[(RDFNode, RDFNode)] = { - targetNodeShapes.map {case (node, shape) => (node, shape.id) } - } - - def serialize(format: String = "TURTLE", - base: Option[IRI], - builder: RDFBuilder): IO[String] = { - format.toUpperCase match { - /*case "TREE" => { - IO(s"PrefixMap ${pm.treeString}\nShapes: ${shapes.treeString}") - }*/ - case _ => builder.empty.flatMap(_.use(b => for { - str <- new Shacl2RDF {}.serialize(this, format, base, b) - } yield str)) - } - } - -} - -object Schema { - - val empty: Schema = - Schema( - pm = SHACLPrefixes.defaultPrefixMap, - imports = List(), - entailments = List(), - shapesMap = Map(), - propertyGroups = Map() - ) - -} +package es.weso.shacl + +import es.weso.rdf.{PrefixMap, RDFBuilder} +import es.weso.rdf.nodes.{IRI, RDFNode} +import es.weso.shacl.converter.Shacl2RDF + +import scala.util.{Either, Left, Right} +// import sext._ +import cats.effect.IO + +case class Schema( + pm: PrefixMap, + imports: List[IRI], + entailments: List[IRI], + shapesMap: Map[RefNode, Shape], + propertyGroups: Map[RefNode, PropertyGroup] +) { + + lazy val shapes: Seq[Shape] = + shapesMap.toSeq.map(_._2) + + lazy val shapeRefs: Seq[RefNode] = + shapesMap.keys.toSeq + + /** Get the shape associated to an IRI + * @param node + * IRI that identifies a shape + */ + def shape(node: RDFNode): Either[String, Shape] = + shapesMap.get(RefNode(node)) match { + case None => Left(s"Not found $node in Schema") + case Some(shape) => Right(shape) + } + + private[shacl] def siblingQualifiedShapes(s: RefNode): List[RefNode] = { + val parentShapes: List[Shape] = + parents(s).map(shapesMap.get(_)).collect { case Some(shape) => shape } + val qualifiedPropertyShapes = + parentShapes.flatMap(_.propertyShapes).filter(_ != s) + collectQualifiedValueShapes(qualifiedPropertyShapes) + } + + private def collectQualifiedValueShapes(ls: List[RefNode]): List[RefNode] = { + val zero: List[RefNode] = List() + def 
comb(xs: List[RefNode], x: RefNode): List[RefNode] = + qualifiedShapes(x) ++ xs + ls.foldLeft(zero)(comb) + } + + private def qualifiedShapes(p: RefNode): List[RefNode] = shapesMap.get(p) match { + case None => List() + case Some(shape) => + shape.components.collect { case x: QualifiedValueShape => x.shape }.toList + } + + /* Find shape x such that x sh:property p + */ + private[shacl] def parents(p: RefNode): List[RefNode] = { + shapesWithPropertyShape(this.shapeRefs, p) + } + + private def shapesWithPropertyShape(ls: Seq[RefNode], p: RefNode): List[RefNode] = { + ls.filter(hasPropertyShape(_, p)).toList + } + + private def hasPropertyShape(s: RefNode, p: RefNode): Boolean = { + shapesMap.get(s) match { + case None => false // TODO: Maybe raise an error + case Some(shape) => + if (shape.propertyShapes.contains(p)) true + else false + } + } + + /** Get the sequence of sh:targetNode declarations + */ + def targetNodeShapes: Seq[(RDFNode, Shape)] = { + val zero: Seq[(RDFNode, Shape)] = Seq() + def comb(rs: Seq[(RDFNode, Shape)], s: Shape): Seq[(RDFNode, Shape)] = { + val ns: Seq[RDFNode] = s.targetNodes + ns.map(n => (n, s)) ++ rs + } + shapes.foldLeft(zero)(comb) + } + + /** Get the sequence of `sh:targetNode` declarations + * @return + * a list of pairs (n,s) where n is the IRI of a node and s is the IRI of a shape + */ + def targetNodeDeclarations: Seq[(RDFNode, RDFNode)] = { + targetNodeShapes.map { case (node, shape) => (node, shape.id) } + } + + def serialize(format: String = "TURTLE", base: Option[IRI], builder: RDFBuilder): IO[String] = { + format.toUpperCase match { + /*case "TREE" => { + IO(s"PrefixMap ${pm.treeString}\nShapes: ${shapes.treeString}") + }*/ + case _ => + builder.empty.flatMap( + _.use(b => + for { + str <- new Shacl2RDF {}.serialize(this, format, base, b) + } yield str + ) + ) + } + } + +} + +object Schema { + + val empty: Schema = + Schema( + pm = SHACLPrefixes.defaultPrefixMap, + imports = List(), + entailments = List(), + shapesMap = 
Map(), + propertyGroups = Map() + ) + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/Shacl.scala b/modules/shacl/src/main/scala/es/weso/shacl/Shacl.scala index 5218944..1843bbb 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/Shacl.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/Shacl.scala @@ -1,8 +1,8 @@ -package es.weso.shacl - -object Shacl { - case object Unbounded - lazy val defaultMin = 0 - lazy val defaultMax = Unbounded - lazy val defaultFormat = "TURTLE" -} +package es.weso.shacl + +object Shacl { + case object Unbounded + lazy val defaultMin = 0 + lazy val defaultMax = Unbounded + lazy val defaultFormat = "TURTLE" +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/Shape.scala b/modules/shacl/src/main/scala/es/weso/shacl/Shape.scala index c0714eb..62edf8e 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/Shape.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/Shape.scala @@ -1,152 +1,149 @@ -package es.weso.shacl - -import es.weso.rdf.nodes._ -import es.weso.rdf.path.SHACLPath -import es.weso.shacl.report.Severity - -sealed abstract class Shape { - def id: RDFNode - def targets: Seq[Target] - def components: Seq[Component] - def propertyShapes: Seq[RefNode] - def closed: Boolean - def deactivated: Boolean - def message: MessageMap - def name: MessageMap - def description: MessageMap - def order: Option[DecimalLiteral] - def group: Option[RefNode] - def sourceIRI: Option[IRI] - - def severity: Option[Severity] - def ignoredProperties: List[IRI] - - def hasId(iri: IRI): Boolean = { - id == iri - } - - def showId: String = - id match { - case iri: IRI => iri.str - case bnode: BNode => bnode.toString - case l: Literal => l.getLexicalForm - } - - def targetNodes: Seq[RDFNode] = - targets.map(_.toTargetNode).flatten.map(_.node) - - def targetClasses: Seq[RDFNode] = - targets.map(_.toTargetClass).flatten.map(_.node) - - def targetSubjectsOf: Seq[IRI] = - targets.map(_.toTargetSubjectsOf).flatten.map(_.pred) - - def 
targetObjectsOf: Seq[IRI] = - targets.map(_.toTargetObjectsOf).flatten.map(_.pred) - - def componentShapes: Seq[RefNode] = { - components.collect { - case NodeComponent(sref) => sref -// case Or(srefs) => srefs -// case And(srefs) => srefs -// case Not(sref) => List(sref) // TODO: Not sure if this should be included... - } - } - - def addPropertyShapes(ps: Seq[RefNode]): Shape - - -} - -case class NodeShape( - id: RDFNode, - components: List[Component], - targets: Seq[Target], - propertyShapes: Seq[RefNode], - closed: Boolean, - ignoredProperties: List[IRI], - deactivated: Boolean, - message: MessageMap, - severity: Option[Severity], - name: MessageMap, - description: MessageMap, - order: Option[DecimalLiteral], - group: Option[RefNode], - sourceIRI: Option[IRI] - ) extends Shape { - - def isPropertyConstraint = false - - override def addPropertyShapes(ps: Seq[RefNode]): Shape = - this.copy(propertyShapes = this.propertyShapes ++ ps) - -} - -case class PropertyShape( - id: RDFNode, - path: SHACLPath, - components: List[Component], - targets: Seq[Target], - propertyShapes: Seq[RefNode], - closed: Boolean, - ignoredProperties: List[IRI], - deactivated: Boolean, - message: MessageMap, - severity: Option[Severity], - name: MessageMap, - description: MessageMap, - order: Option[DecimalLiteral], - group: Option[RefNode], - sourceIRI: Option[IRI], - annotations: List[(IRI,RDFNode)] - ) extends Shape { - - def isPropertyConstraint = true - - def predicate: Option[IRI] = path.predicate - - override def addPropertyShapes(ps: Seq[RefNode]): Shape = { - this.copy(propertyShapes = this.propertyShapes ++ ps) - } -} - -object Shape { - - def empty(id: RDFNode) = NodeShape( - id = id, - components = List(), - targets = Seq(), - propertyShapes = Seq(), - closed = false, - ignoredProperties = List(), - deactivated = false, - message = MessageMap.empty, - severity = None, - name = MessageMap.empty, - description = MessageMap.empty, - order = None, - group = None, - sourceIRI = None - 
) - - def emptyPropertyShape( - id: RDFNode, - path: SHACLPath): PropertyShape = PropertyShape( - id = id, - path = path, - components = List(), - targets = Seq(), - propertyShapes = Seq(), - closed = false, - ignoredProperties = List(), - deactivated = false, - message = MessageMap.empty, - severity = None, - name = MessageMap.empty, - description = MessageMap.empty, - order = None, - group = None, - sourceIRI = None, - annotations = List() - ) -} +package es.weso.shacl + +import es.weso.rdf.nodes._ +import es.weso.rdf.path.SHACLPath +import es.weso.shacl.report.Severity + +sealed abstract class Shape { + def id: RDFNode + def targets: Seq[Target] + def components: Seq[Component] + def propertyShapes: Seq[RefNode] + def closed: Boolean + def deactivated: Boolean + def message: MessageMap + def name: MessageMap + def description: MessageMap + def order: Option[DecimalLiteral] + def group: Option[RefNode] + def sourceIRI: Option[IRI] + + def severity: Option[Severity] + def ignoredProperties: List[IRI] + + def hasId(iri: IRI): Boolean = { + id == iri + } + + def showId: String = + id match { + case iri: IRI => iri.str + case bnode: BNode => bnode.toString + case l: Literal => l.getLexicalForm + } + + def targetNodes: Seq[RDFNode] = + targets.map(_.toTargetNode).flatten.map(_.node) + + def targetClasses: Seq[RDFNode] = + targets.map(_.toTargetClass).flatten.map(_.node) + + def targetSubjectsOf: Seq[IRI] = + targets.map(_.toTargetSubjectsOf).flatten.map(_.pred) + + def targetObjectsOf: Seq[IRI] = + targets.map(_.toTargetObjectsOf).flatten.map(_.pred) + + def componentShapes: Seq[RefNode] = { + components.collect { case NodeComponent(sref) => + sref +// case Or(srefs) => srefs +// case And(srefs) => srefs +// case Not(sref) => List(sref) // TODO: Not sure if this should be included... 
+ } + } + + def addPropertyShapes(ps: Seq[RefNode]): Shape + +} + +case class NodeShape( + id: RDFNode, + components: List[Component], + targets: Seq[Target], + propertyShapes: Seq[RefNode], + closed: Boolean, + ignoredProperties: List[IRI], + deactivated: Boolean, + message: MessageMap, + severity: Option[Severity], + name: MessageMap, + description: MessageMap, + order: Option[DecimalLiteral], + group: Option[RefNode], + sourceIRI: Option[IRI] +) extends Shape { + + def isPropertyConstraint = false + + override def addPropertyShapes(ps: Seq[RefNode]): Shape = + this.copy(propertyShapes = this.propertyShapes ++ ps) + +} + +case class PropertyShape( + id: RDFNode, + path: SHACLPath, + components: List[Component], + targets: Seq[Target], + propertyShapes: Seq[RefNode], + closed: Boolean, + ignoredProperties: List[IRI], + deactivated: Boolean, + message: MessageMap, + severity: Option[Severity], + name: MessageMap, + description: MessageMap, + order: Option[DecimalLiteral], + group: Option[RefNode], + sourceIRI: Option[IRI], + annotations: List[(IRI, RDFNode)] +) extends Shape { + + def isPropertyConstraint = true + + def predicate: Option[IRI] = path.predicate + + override def addPropertyShapes(ps: Seq[RefNode]): Shape = { + this.copy(propertyShapes = this.propertyShapes ++ ps) + } +} + +object Shape { + + def empty(id: RDFNode) = NodeShape( + id = id, + components = List(), + targets = Seq(), + propertyShapes = Seq(), + closed = false, + ignoredProperties = List(), + deactivated = false, + message = MessageMap.empty, + severity = None, + name = MessageMap.empty, + description = MessageMap.empty, + order = None, + group = None, + sourceIRI = None + ) + + def emptyPropertyShape(id: RDFNode, path: SHACLPath): PropertyShape = PropertyShape( + id = id, + path = path, + components = List(), + targets = Seq(), + propertyShapes = Seq(), + closed = false, + ignoredProperties = List(), + deactivated = false, + message = MessageMap.empty, + severity = None, + name = 
MessageMap.empty, + description = MessageMap.empty, + order = None, + group = None, + sourceIRI = None, + annotations = List() + ) +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/Target.scala b/modules/shacl/src/main/scala/es/weso/shacl/Target.scala index 044f480..fa6d78b 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/Target.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/Target.scala @@ -1,26 +1,26 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.{IRI, RDFNode} - -sealed abstract class Target { - def toTargetNode: Option[TargetNode] = this match { - case tn: TargetNode => Some(tn) - case _ => None - } - def toTargetClass: Option[TargetClass] = this match { - case tc: TargetClass => Some(tc) - case _ => None - } - def toTargetSubjectsOf: Option[TargetSubjectsOf] = this match { - case t: TargetSubjectsOf => Some(t) - case _ => None - } - def toTargetObjectsOf: Option[TargetObjectsOf] = this match { - case t: TargetObjectsOf => Some(t) - case _ => None - } -} -case class TargetNode(node: RDFNode) extends Target -case class TargetClass(node: RDFNode) extends Target -case class TargetSubjectsOf(pred: IRI) extends Target -case class TargetObjectsOf(pred: IRI) extends Target +package es.weso.shacl + +import es.weso.rdf.nodes.{IRI, RDFNode} + +sealed abstract class Target { + def toTargetNode: Option[TargetNode] = this match { + case tn: TargetNode => Some(tn) + case _ => None + } + def toTargetClass: Option[TargetClass] = this match { + case tc: TargetClass => Some(tc) + case _ => None + } + def toTargetSubjectsOf: Option[TargetSubjectsOf] = this match { + case t: TargetSubjectsOf => Some(t) + case _ => None + } + def toTargetObjectsOf: Option[TargetObjectsOf] = this match { + case t: TargetObjectsOf => Some(t) + case _ => None + } +} +case class TargetNode(node: RDFNode) extends Target +case class TargetClass(node: RDFNode) extends Target +case class TargetSubjectsOf(pred: IRI) extends Target +case class TargetObjectsOf(pred: IRI) extends 
Target diff --git a/modules/shacl/src/main/scala/es/weso/shacl/Value.scala b/modules/shacl/src/main/scala/es/weso/shacl/Value.scala index 5f36ccc..1e3b609 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/Value.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/Value.scala @@ -1,40 +1,38 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.{IRI, Literal, RDFNode} - -/** - * Represents IRIs or Literals (no Blank nodes) - */ -trait Value { - /** - * `true` if `node` matches this value - */ - def matchNode(node: RDFNode): Boolean - - /** - * Conversion from values to RDFNode's - */ - def rdfNode: RDFNode -} - -case class IRIValue(iri: IRI) extends Value { - override def matchNode(node: RDFNode) = { - node match { - case i: IRI => iri == i - case _ => false - } - } - - override def rdfNode: RDFNode = iri -} - -case class LiteralValue(literal: Literal) extends Value { - override def matchNode(node: RDFNode) = { - node match { - case l: Literal => l == literal - case _ => false - } - } - - override def rdfNode: RDFNode = literal -} +package es.weso.shacl + +import es.weso.rdf.nodes.{IRI, Literal, RDFNode} + +/** Represents IRIs or Literals (no Blank nodes) + */ +trait Value { + + /** `true` if `node` matches this value + */ + def matchNode(node: RDFNode): Boolean + + /** Conversion from values to RDFNode's + */ + def rdfNode: RDFNode +} + +case class IRIValue(iri: IRI) extends Value { + override def matchNode(node: RDFNode) = { + node match { + case i: IRI => iri == i + case _ => false + } + } + + override def rdfNode: RDFNode = iri +} + +case class LiteralValue(literal: Literal) extends Value { + override def matchNode(node: RDFNode) = { + node match { + case l: Literal => l == literal + case _ => false + } + } + + override def rdfNode: RDFNode = literal +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/converter/RDF2Shacl.scala b/modules/shacl/src/main/scala/es/weso/shacl/converter/RDF2Shacl.scala index bb61ed2..d0df22e 100644 --- 
a/modules/shacl/src/main/scala/es/weso/shacl/converter/RDF2Shacl.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/converter/RDF2Shacl.scala @@ -1,755 +1,749 @@ -package es.weso.shacl.converter - -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.PREFIXES._ -import es.weso.rdf.{RDFBuilder, RDFReader} -import es.weso.rdf.nodes._ -import es.weso.rdf.parser.RDFParser -import es.weso.rdf.path._ -import es.weso.shacl.SHACLPrefixes._ -import es.weso.shacl._ -import es.weso.shacl.report._ -import es.weso.utils.EitherUtils._ -// import scala.util.{Failure, Success, Try} -import cats.effect._ -import cats.data._ -import cats.implicits._ -import es.weso.rdf.parser._ -import es.weso.utils.internal.CollectionCompat._ -import es.weso.rdf.triples.RDFTriple -import fs2.Stream - -object RDF2Shacl extends RDFParser with LazyLogging { - -private type ShapesMap = Map[RefNode, Shape] - -type PropertyGroups = Map[RefNode, PropertyGroup] - -// TODO. Why this class cannot be private? -case class ParserState( - parsedShapes: ShapesMap, - parsedPropertyGroups: PropertyGroups, - pendingNodes: List[RDFNode] - ) - -private def initialState: ParserState = - ParserState(Map(),Map(),List()) - -type ShaclParser[A] = StateT[RDFParser, ParserState, A] - -private def getPendingNodes: ShaclParser[List[RDFNode]] = for { - s <- StateT.get[RDFParser,ParserState] -} yield s.pendingNodes - -private def ok_s[A](x: A): ShaclParser[A] = - StateT.liftF(ok(x)) - -private def firstOf_s[A](ps: ShaclParser[A]*): ShaclParser[A] = { - def comb(rest: ShaclParser[A], p: ShaclParser[A]): ShaclParser[A] = - p orElse rest - - val zero: ShaclParser[A] = fromRDFParser(parseFail("firstOf: none of the parsers succeeded")) - ps.foldLeft(zero)(comb) -} - -private def removePendingNode: ShaclParser[Option[RDFNode]] = for { - ns <- getPendingNodes - r <- ns match { - case Nil => ok_s(none) - case n :: rest => { - for { - _ <- StateT.modify[RDFParser,ParserState](_.copy(pendingNodes = rest)) - } yield n.some 
- } - } -} yield r - -private def addPendingNode(n: RDFNode): ShaclParser[Unit] = { - StateT.modify(s => s.copy(pendingNodes = n :: s.pendingNodes)) -} - -private def addPendingNodes(ns: List[RDFNode]): ShaclParser[Unit] = { - StateT.modify(s => s.copy(pendingNodes = ns ++ s.pendingNodes)) -} - -private def addShapesMap(n: RefNode, shape: Shape): ShaclParser[Unit] = { - StateT.modify(s => - s.copy(parsedShapes = s.parsedShapes.updated(n,shape)) - ) -} - -private def addParsedPropertyGroups(n: RefNode, pg: PropertyGroup): ShaclParser[Unit] = { - StateT.modify(s => - s.copy(parsedPropertyGroups = - s.parsedPropertyGroups.updated(n,pg)) - ) -} - -private def getShapesMap: ShaclParser[ShapesMap] = for { - s <- StateT.get[RDFParser,ParserState] -} yield s.parsedShapes - -private def getParsedPropertyGroups: ShaclParser[PropertyGroups] = for { - s <- StateT.get[RDFParser,ParserState] -} yield s.parsedPropertyGroups - - -private def getRDF_s: ShaclParser[RDFReader] = - StateT.liftF(getRDF) - -private def getNode_s: ShaclParser[RDFNode] = - StateT.liftF(getNode) - - private def io2s[A](e: IO[A]): ShaclParser[A] = - StateT.liftF(liftIO(e)) - -private def fromStream[A](e: Stream[IO,A]): ShaclParser[LazyList[A]] = - StateT.liftF(liftIO(e.compile.to(LazyList))) - - -private def fromEitherS[A](e: Either[String,A]): ShaclParser[A] = - StateT.liftF(fromEither(e.leftMap(mkErr(_)))) - -case class ShaclParserError(e: String) extends RuntimeException(e) - -private def mkErr(e: String): ShaclParserError = ShaclParserError(e) - -/*private def fromEitherShaclParser[A](e: Either[Err,A]): ShaclParser[A] = - StateT.liftF(fromEither(e)) */ - - -private def fromRDFParser[A](p: RDFParser[A]): ShaclParser[A] = - StateT.liftF(p) - -private def withNode_s[A](node: RDFNode, p: ShaclParser[A]): ShaclParser[A] = - StateT.applyF { - val ff: (ParserState => RDFParser[(ParserState, A)]) => - (ParserState => RDFParser[(ParserState, A)]) - = f => (ps => withNode(node, f(ps))) - p.runF.map(ff) - } - 
-/*private def showStatus(msg: String): ShaclParser[Unit] = for { - ps <- StateT.get[RDFParser,ParserState] -} yield { - println(s""" - |$msg - |Parser state: PendingNodes: ${ps.pendingNodes.map(_.toString).mkString(",")} - |parsedShapesMap: ${ps.parsedShapes.toString} - |parsedPropertyGroups: ${ps.parsedPropertyGroups.keySet.map(_.toString).mkString(",")}""".stripMargin) - () -}*/ - - -private def anyOf_s[A](ps: ShaclParser[A]*): ShaclParser[Seq[A]] = { - def comb(rest: ShaclParser[Seq[A]], p: ShaclParser[A]): ShaclParser[Seq[A]] = for { - maybe <- p.map(_.some) orElse ok_s(none[A]) - rs <- rest - } yield maybe match { - case None => rs - case Some(x) => x +: rs - } - val zero: ShaclParser[Seq[A]] = ok_s(Seq()) - ps.foldLeft(zero)(comb) -} - -/* private def tryGetShacl(rdf: RDFBuilder, - resolveImports: Boolean): Try[Schema] = - getShacl(rdf, resolveImports).fold( - str => Failure(new Exception(str)), - Success(_)) -*/ - -private def getShaclFromRDFReader(rdf: RDFReader): ShaclParser[Schema] = { - for { - sm <- shapesMap - imports <- parseImports - entailments <- parseEntailments - parsedPropertyGroups <- getParsedPropertyGroups - parsedShapeMap <- getShapesMap - pm <- io2s(rdf.getPrefixMap) - } yield Schema( - pm = pm, - imports = imports, - entailments = entailments, - shapesMap = parsedShapeMap, - propertyGroups = parsedPropertyGroups.toMap - ) - } - - /** - * Parses RDF content and obtains a SHACL Schema and a PrefixMap - */ - def getShacl(rdf: RDFBuilder, - resolveImports: Boolean = true - ): IO[Schema] = for { - rdfReader <- if (resolveImports) rdf.extendImports - else IO.pure(rdf) - schema <- getShaclReader(rdfReader) - } yield schema - - def runShaclParser[A](parser:ShaclParser[A], rdf:RDFReader): IO[A] = - for { - eitherValue <- parser.run(initialState).value.run(Config(initialNode,rdf)) - value <- eitherValue.fold( - err => IO.raiseError[A](err), - pair => { - val (_,v) = pair - v.pure[IO] - } - ) - - } yield value - - def getShaclReader(rdf: RDFReader): 
IO[Schema] = - runShaclParser(getShaclFromRDFReader(rdf), rdf) - - private def shapesMap: ShaclParser[Unit] = - for { - rdf <- getRDF_s - nodeShapes <- fromStream[RDFNode](rdf.subjectsWithType(`sh:NodeShape`)) - propertyShapes <- fromStream[RDFNode](rdf.subjectsWithType(`sh:PropertyShape`)) - shapes <- fromStream[RDFNode](rdf.subjectsWithType(`sh:Shape`)) - objectsPropertyShapes <- fromStream[RDFNode](rdf.subjectsWithProperty(`sh:property`)) - allShapes = nodeShapes ++ propertyShapes ++ shapes ++ objectsPropertyShapes - _ <- addPendingNodes(allShapes.toList) - // _ <- showStatus("shapesMap after addPending") - sm <- parseShapesMap - } yield () - - private def parseShapesMap: ShaclParser[Unit] = for { - maybeNode <- removePendingNode - // _ <- { println(s"Pending node to parse: $maybeNode"); ok_s(()) } - r <- maybeNode match { - case None => getShapesMap - case Some(n) => for { - s <- withNode_s(n,shape) - // _ <- showStatus(s"parseShapeMaps after checking shape of ${n.toString}") - sm <- parseShapesMap - } yield sm - } - } yield () - - -def initialNode: RDFNode = IRI("http://internal/node") - -/*private def parseShapesMap: ShaclParser[ShapesMap] = for { - pendingNodes <- getPendingNodes - r <- pendingNodes.size match { - case 0 => getShapesMap - case _ => withNode - } -} yield r -*/ - - -def shape: ShaclParser[RefNode] = for { - n <- getNode_s - shapeRef = RefNode(n) - parsedShapes <- getShapesMap - v <- if (parsedShapes contains shapeRef) { - ok_s(shapeRef) - } else { - for { - shapeRef <- firstOf_s(nodeShape, propertyShape) - } yield { - // parsedShapes(shapeRef) = newShape - shapeRef - } - } - } yield v - - private def parseEntailments: ShaclParser[List[IRI]] = - for { - rdf <- getRDF_s - ts <- fromStream[RDFTriple](rdf.triplesWithPredicate(`sh:entailment`)) - iris <- fromEitherS(sequence(ts.map(_.obj).toList.map(_.toIRI))) - } yield iris - - private def parseImports: ShaclParser[List[IRI]] = - for { - rdf <- getRDF_s - ts <- 
fromStream[RDFTriple](rdf.triplesWithPredicate(`owl:imports`)) - os = ts.map(_.obj).toList - iris <- fromEitherS(sequence(os.map(_.toIRI))) -// os <- fromEitherShaclParser(sequence(ts.map(_.obj).toList.map(_.toIRI))) - } yield iris - - /* private def mkId(n: RDFNode): Option[IRI] = n match { - case iri: IRI => Some(iri) - case _ => None - } */ - - private def nodeShape: ShaclParser[RefNode] = for { - n <- getNode_s - rdf <- getRDF_s - types <- fromRDFParser(rdfTypes) - _ <- fromRDFParser(failIf(types.contains(`sh:PropertyShape`), "Node shapes must not have rdf:type sh:PropertyShape")) - targets <- fromRDFParser(targets) - propertyShapes <- propertyShapes - components <- components - closed <- fromRDFParser(booleanFromPredicateOptional(`sh:closed`)) - deactivated <- fromRDFParser(booleanFromPredicateOptional(`sh:deactivated`)) - message <- fromRDFParser(parseMessage) - name <- fromRDFParser(parseMessage) - description <- fromRDFParser(parseMessage) - group <- parsePropertyGroup - order <- fromRDFParser(parseOrder) - severity <- fromRDFParser(parseSeverity) - ignoredNodes <- fromRDFParser(rdfListForPredicateOptional(`sh:ignoredProperties`)) - ignoredIRIs <- fromEitherS(nodes2iris(ignoredNodes)) - classes <- fromRDFParser(objectsFromPredicate(`sh:class`)) - shape = NodeShape( - id = n, - components = components.toList, - targets = targets, - propertyShapes = propertyShapes, - closed = closed.getOrElse(false), - ignoredProperties = ignoredIRIs, - deactivated = deactivated.getOrElse(false), - message = message, - severity = severity, - name = name, - description = description, - group = group, - order = order, - sourceIRI = rdf.sourceIRI - ) - ref = RefNode(n) - _ <- addShapesMap(ref,shape) - // _ <- { println(s"NodeShape parsed for node ${n.toString}: ${shape.toString}"); ok_s(())} - // _ <- showStatus(s"After nodeShape of ${n.toString}") - } yield ref - - private def parsePropertyGroup: ShaclParser[Option[RefNode]] = for { - maybeGroup <- 
fromRDFParser(objectFromPredicateOptional(`sh:group`)) - group <- maybeGroup match { - case None => ok_s(none) - case Some(groupNode) => { - val ref = RefNode(groupNode) - for { - parsedPropGroups <- getParsedPropertyGroups - v <- parsedPropGroups.get(ref) match { - case Some(pg) => ok_s(Some(ref)) - case None => for { - labels <- fromRDFParser(objectsFromPredicate(`rdfs:label`)) - order <- fromRDFParser(parseOrder) - pg = PropertyGroup(order,labels) - _ <- addParsedPropertyGroups(ref,pg) - } yield Some(ref) - } - } yield v - } - } - } yield group - - private def parseOrder: RDFParser[Option[DecimalLiteral]] = for { - maybeOrder <- decimalLiteralFromPredicateOptional(`sh:order`) - } yield maybeOrder - - private def parseSeverity: RDFParser[Option[Severity]] = for { - maybeIri <- iriFromPredicateOptional(`sh:severity`) - } yield maybeIri match { - case Some(`sh:Violation`) => Some(ViolationSeverity) - case Some(`sh:Warning`) => Some(WarningSeverity) - case Some(`sh:Info`) => Some(InfoSeverity) - case Some(iri) => Some(GenericSeverity(iri)) - case None => None - } - - private def parseMessage: RDFParser[MessageMap] = for { - nodes <- objectsFromPredicate(`sh:message`) - map <- cnvMessages(nodes) - } yield map - - private def cnvMessages(ns: Set[RDFNode]): RDFParser[MessageMap] = - fromEither(MessageMap.fromRDFNodes(ns.toList).leftMap(mkErr(_))) - - private def propertyShape: ShaclParser[RefNode] = for { - rdf <- getRDF_s - n <- getNode_s - types <- fromRDFParser(rdfTypes) - _ <- fromRDFParser(failIf(types.contains(`sh:NodeShape`), "Property shapes must not have rdf:type sh:NodeShape")) - targets <- fromRDFParser(targets) - nodePath <- fromRDFParser(objectFromPredicate(`sh:path`)) - path <- fromRDFParser(withNode(nodePath, parsePath)) - propertyShapes <- propertyShapes - // _ <- { println(s"Components of ${n}"); ok_s(())} - components <- components - closed <- fromRDFParser(booleanFromPredicateOptional(`sh:closed`)) - ignoredNodes <- 
fromRDFParser(rdfListForPredicateOptional(`sh:ignoredProperties`)) - deactivated <- fromRDFParser(booleanFromPredicateOptional(`sh:deactivated`)) - message <- fromRDFParser(parseMessage) - severity <- fromRDFParser(parseSeverity) - name <- fromRDFParser(parseMessage) - description <- fromRDFParser(parseMessage) - group <- parsePropertyGroup - order <- fromRDFParser(parseOrder) - ignoredIRIs <- fromEitherS(nodes2iris(ignoredNodes)) - ps = PropertyShape( - id = n, - path = path, - components = components.toList, - targets = targets, - propertyShapes = propertyShapes, - closed = closed.getOrElse(false), - ignoredProperties = ignoredIRIs, - deactivated = deactivated.getOrElse(false), - message = message, - severity = severity, - name = name, - description = description, - order = order, - group = group, - sourceIRI = rdf.sourceIRI, - annotations = List() // TODO: Annotations should contain the values for other predicates associated with a given node - ) - ref = RefNode(n) - // _ <- { println(s"Property shape: ${n}: Components: ${components.toString}"); ok_s(())} - _ <- addShapesMap(ref,ps) - } yield ref - - private def targets: RDFParser[Seq[Target]] = - combineAll( - targetNodes, - targetClasses, - implicitTargetClass, - targetSubjectsOf, - targetObjectsOf) - - private def targetNodes: RDFParser[Seq[Target]] = - for { - ns <- objectsFromPredicate(`sh:targetNode`) - vs <- fromEither(sequenceEither(ns.toList.map(mkTargetNode))) - } yield vs - - private def targetClasses: RDFParser[Seq[Target]] = - for { - ns <- objectsFromPredicate(`sh:targetClass`) - vs <- fromEither(sequenceEither(ns.toList.map(mkTargetClass))) - } yield vs - - private def implicitTargetClass: RDFParser[Seq[Target]] = - for { - rdf <- getRDF - n <- getNode - ts <- fromRDFStream(rdf.triplesWithSubjectPredicate(n, `rdf:type`)) - shapeTypes = ts.map(_.obj) - rdfs_Class = rdfs + "Class" - r <- fromEither(if (shapeTypes.contains(rdfs_Class)) - mkTargetClass(n).map(Seq(_)) - else - Right(Seq())) - } yield r 
- - private def targetSubjectsOf: RDFParser[Seq[Target]] = - for { - ns <- objectsFromPredicate(`sh:targetSubjectsOf`) - vs <- fromEither(sequenceEither(ns.toList.map(mkTargetSubjectsOf))) - } yield vs - - private def targetObjectsOf: RDFParser[Seq[Target]] = - for { - ns <- objectsFromPredicate(`sh:targetObjectsOf`) - vs <- fromEither(sequenceEither(ns.toList.map(mkTargetObjectsOf))) - } yield vs - - private def mkTargetNode(n: RDFNode): Either[Err, TargetNode] = - Right(TargetNode(n)) - - private def mkTargetClass(n: RDFNode): Either[Err, TargetClass] = - Right(TargetClass(n)) - - private def mkTargetSubjectsOf(n: RDFNode): Either[Err, TargetSubjectsOf] = n match { - case i: IRI => Right(TargetSubjectsOf(i)) - case _ => Left(mkErr(s"targetSubjectsOf requires an IRI. Obtained $n")) - } - - private def mkTargetObjectsOf(n: RDFNode): Either[Err, TargetObjectsOf] = n match { - case i: IRI => Right(TargetObjectsOf(i)) - case _ => Left(mkErr(s"targetObjectsOf requires an IRI. Obtained $n")) - } - - private def propertyShapes: ShaclParser[List[RefNode]] = - for { - ps <- fromRDFParser(objectsFromPredicate(`sh:property`)) - vs <- ps.toList.map(p => withNode_s(p, propertyShapeRef)).sequence - } yield vs - - private def propertyShapeRef: ShaclParser[RefNode] = for { - n <- getNode_s - _ <- addPendingNode(n) - } yield { - RefNode(n) - } - - private def parsePath: RDFParser[SHACLPath] = for { - n <- getNode - v <- n match { - case iri: IRI => ok(PredicatePath(iri)) - case bnode: BNode => someOf( - oneOrMorePath, - zeroOrMorePath, - zeroOrOnePath, - alternativePath, - sequencePath, - inversePath - ) - case _ => parseFail(s"Unsupported value $n for path") - } - } yield v - - private def inversePath: RDFParser[SHACLPath] = for { - pathNode <- objectFromPredicate(`sh:inversePath`) - path <- withNode(pathNode,parsePath) - } yield InversePath(path) - - private def oneOrMorePath: RDFParser[SHACLPath] = for { - pathNode <- objectFromPredicate(`sh:oneOrMorePath`) - path <- 
withNode(pathNode,parsePath) - } yield OneOrMorePath(path) - - private def zeroOrMorePath: RDFParser[SHACLPath] = for { - pathNode <- objectFromPredicate(`sh:zeroOrMorePath`) - path <- withNode(pathNode,parsePath) - } yield ZeroOrMorePath(path) - - private def zeroOrOnePath: RDFParser[SHACLPath] = for { - pathNode <- objectFromPredicate(`sh:zeroOrOnePath`) - path <- withNode(pathNode, parsePath) - } yield ZeroOrOnePath(path) - - private def alternativePath: RDFParser[SHACLPath] = for { - pathNode <- objectFromPredicate(`sh:alternativePath`) - pathNodes <- withNode(pathNode, rdfList) - paths <- group(parsePath, pathNodes) - } yield AlternativePath(paths) - - private def sequencePath: RDFParser[SHACLPath] = for { - pathNodes <- rdfList - paths <- group(parsePath, pathNodes) - } yield { - SequencePath(paths) - } - - private def components: ShaclParser[Seq[Component]] = for { - n <- fromRDFParser(getNode) - // _ <- {println(s"Components for node: ${n}"); ok_s(())} - cs1 <- fromRDFParser(anyOf( - pattern, languageIn, uniqueLang, - equals, disjoint, lessThan, lessThanOrEquals, - hasValue, - in)) - // _ <- {println(s"First round of components: ${cs1}"); ok_s(())} - cs2 <- fromRDFParser(anyOfLs_s( - classComponent, - datatype, - nodeKind, - minCount, maxCount, - minExclusive, maxExclusive, minInclusive, maxInclusive, - minLength, maxLength - )) - // _ <- {println(s"2nd round of components: ${cs2}"); ok_s(())} - cs3 <- anyOf_s(qualifiedValueShape, or, and, not, xone, nodeComponent) - } yield { - val cs = cs1 ++ cs2 ++ cs3.toSeq - // println(s"Components: $cs") - cs - } - - private def anyOfLs_s[A](ps: RDFParser[List[A]]*): RDFParser[Seq[A]] = { - def comb(rest: RDFParser[Seq[A]], p: RDFParser[List[A]]): RDFParser[Seq[A]] = { - p.biflatMap( - _ => rest, - xs => for { - rs <- rest - } yield xs ++ rs - ) - } - val zero: RDFParser[Seq[A]] = ok(Seq()) - val vs = ps.foldLeft(zero)(comb) - vs - } - - - private def classComponent: RDFParser[List[Component]] = for { - cs <- { - 
parsePredicateList(`sh:class`, ClassComponent) - } - } yield { - cs - } - - private def datatype: RDFParser[List[Component]] = - parsePredicateIRIList(`sh:datatype`, Datatype) - - private def minInclusive : RDFParser[List[Component]] = - parsePredicateLiteralList(`sh:minInclusive`, MinInclusive) - - private def maxInclusive : RDFParser[List[Component]] = parsePredicateLiteralList(`sh:maxInclusive`, MaxInclusive) - - private def minExclusive : RDFParser[List[Component]] = parsePredicateLiteralList(`sh:minExclusive`, MinExclusive) - - private def maxExclusive: RDFParser[List[Component]] = parsePredicateLiteralList(`sh:maxExclusive`, MaxExclusive) - - private def minLength: RDFParser[List[Component]] = parsePredicateIntList(`sh:minLength`, MinLength) - - private def maxLength : RDFParser[List[Component]] = parsePredicateIntList(`sh:maxLength`, MaxLength) - - private def pattern: RDFParser[Component] = for { - pat <- stringFromPredicate(`sh:pattern`) - flags <- stringFromPredicateOptional(`sh:flags`) - } yield Pattern(pat, flags) - - private def languageIn: RDFParser[Component] = { - def cnv(node: RDFNode): RDFParser[String] = node match { - case StringLiteral(str) => ok(str) - case _ => parseFail(s"Expected to be a string literal but got $node") - } - for { - nodes <- rdfListForPredicate(`sh:languageIn`) - ls <- nodes.map(cnv).sequence - } yield LanguageIn(ls) - /* for { - rs <- rdfListForPredicate(`sh:languageIn`) - vs: List[RDFParser[String]] = rs.map(n => n match { - case StringLiteral(str) => ok(str) - case _ => parseFail(s"Expected to be a string literal but got $n") - }) - ls <- vs.sequence - } yield LanguageIn(ls) */ - } - - private def uniqueLang: RDFParser[Component] = for { - b <- booleanFromPredicate(`sh:uniqueLang`) - } yield UniqueLang(b) - - private def equals = parsePredicateComparison(`sh:equals`, Equals) - - private def disjoint = parsePredicateComparison(`sh:disjoint`, Disjoint) - - private def lessThan = parsePredicateComparison(`sh:lessThan`, 
LessThan) - - private def lessThanOrEquals = parsePredicateComparison(`sh:lessThanOrEquals`, LessThanOrEquals) - - private def parsePredicateComparison(pred: IRI, mkComp: IRI => Component): RDFParser[Component] = for { - p <- iriFromPredicate(pred) - } yield mkComp(p) - - private def or: ShaclParser[Component] = for { - shapeNodes <- fromRDFParser(rdfListForPredicate(`sh:or`)) - shapes <- mapShaclParser(shapeNodes.toList, shapeRefConst) - } yield Or(shapes) - - - private def and: ShaclParser[Component] = for { - nodes <- fromRDFParser(rdfListForPredicate(`sh:and`)) - shapes <- mapShaclParser(nodes, shapeRefConst) - } yield And(shapes) - - private def xone: ShaclParser[Component] = for { - nodes <- fromRDFParser(rdfListForPredicate(`sh:xone`)) - shapes <- mapShaclParser(nodes, shapeRefConst) - } yield Xone(shapes) - - // TODO: Check if this must take into account that not is optional... - private def not: ShaclParser[Component] = for { - shapeNode <- fromRDFParser(objectFromPredicate(`sh:not`)) - sref <- withNode_s(shapeNode, shapeRef) - } yield Not(sref) - - private def nodeComponent: ShaclParser[Component] = - for { - nodeShape <- fromRDFParser(objectFromPredicate(`sh:node`)) - sref <- withNode_s(nodeShape, shapeRef) - } yield NodeComponent(sref) - - private def qualifiedValueShape: ShaclParser[Component] = for { - obj <- fromRDFParser(objectFromPredicate(`sh:qualifiedValueShape`)) - sref <- withNode_s(obj, shapeRef) - min <- fromRDFParser(optional(integerLiteralForPredicate(`sh:qualifiedMinCount`))) - max <- fromRDFParser(optional(integerLiteralForPredicate(`sh:qualifiedMaxCount`))) - disjoint <- fromRDFParser(booleanFromPredicateOptional(`sh:qualifiedValueShapesDisjoint`)) - } yield QualifiedValueShape(sref, min, max, disjoint) - - private def shapeRef: ShaclParser[RefNode] = for { - n <- getNode_s - _ <- addPendingNode(n) - } yield RefNode(n) - - private def shapeRefConst(sref: RDFNode): ShaclParser[RefNode] = - withNode_s(sref, shapeRef) - - def 
mapShaclParser[A, B](ls: List[A], p: A => ShaclParser[B]): ShaclParser[List[B]] = { - ls.map(v => p(v)).sequence[ShaclParser,B] - } - - - private def minCount : RDFParser[List[Component]] = parsePredicateIntList(`sh:minCount`, MinCount) - private def maxCount : RDFParser[List[Component]] = parsePredicateIntList(`sh:maxCount`, MaxCount) - - private def hasValue: RDFParser[Component] = - for { - o <- objectFromPredicate(`sh:hasValue`) - v <- fromEither(node2Value(o).leftMap(mkErr(_))) - } yield HasValue(v) - - private def in: RDFParser[Component] = - for { - ns <- rdfListForPredicate(`sh:in`) - vs <- fromEither(convert2Values(ns.map(node2Value(_))).leftMap(mkErr(_))) - } yield In(vs) - - private def node2Value(n: RDFNode): Either[String, Value] = { - n match { - case i: IRI => Right(IRIValue(i)) - case l: Literal => Right(LiteralValue(l)) - case _ => Left(s"Element $n must be a IRI or a Literal to be part of sh:in") - } - } - - private def convert2Values[A](cs: List[Either[String, A]]): Either[String, List[A]] = { - if (cs.isEmpty) - Left("The list of values associated with sh:in must not be empty") - else { - sequenceEither(cs) - } - } - - private def nodeKind: RDFParser[List[Component]] = - for { - os <- objectsFromPredicate(`sh:nodeKind`) - nk <- fromEither(parseNodeKind(os)) - } yield List(nk) - - private def parseNodeKind(os: Set[RDFNode]): Either[Err, Component] = { - os.size match { - case 0 => Left(mkErr("no iriObjects of nodeKind property")) - case 1 => { - os.head match { - case nk: IRI => nk match { - case `sh:IRI` => Right(NodeKind(IRIKind)) - case `sh:BlankNode` => Right(NodeKind(BlankNodeKind)) - case `sh:Literal` => Right(NodeKind(LiteralKind)) - case `sh:BlankNodeOrLiteral` => Right(NodeKind(BlankNodeOrLiteral)) - case `sh:BlankNodeOrIRI` => Right(NodeKind(BlankNodeOrIRI)) - case `sh:IRIOrLiteral` => Right(NodeKind(IRIOrLiteral)) - case x => { - logger.error(s"incorrect value of nodeKind property $x") - Left(mkErr(s"incorrect value of nodeKind 
property $x")) - } - } - case x => { - logger.error(s"incorrect value of nodeKind property $x") - Left(mkErr(s"incorrect value of nodeKind property $x")) - } - } - } - case n => Left(mkErr(s"iriObjects of nodeKind property > 1. $os")) - } - } - -// private def noTarget: Seq[Target] = Seq() -// private def noPropertyShapes: Seq[PropertyShape] = Seq() - -} +package es.weso.shacl.converter + +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.PREFIXES._ +import es.weso.rdf.{RDFBuilder, RDFReader} +import es.weso.rdf.nodes._ +import es.weso.rdf.parser.RDFParser +import es.weso.rdf.path._ +import es.weso.shacl.SHACLPrefixes._ +import es.weso.shacl._ +import es.weso.shacl.report._ +import es.weso.utils.EitherUtils._ +// import scala.util.{Failure, Success, Try} +import cats.effect._ +import cats.data._ +import cats.implicits._ +import es.weso.rdf.parser._ +import es.weso.utils.internal.CollectionCompat._ +import es.weso.rdf.triples.RDFTriple +import fs2.Stream + +object RDF2Shacl extends RDFParser with LazyLogging { + + private type ShapesMap = Map[RefNode, Shape] + + type PropertyGroups = Map[RefNode, PropertyGroup] + +// TODO. Why this class cannot be private? 
+ case class ParserState( + parsedShapes: ShapesMap, + parsedPropertyGroups: PropertyGroups, + pendingNodes: List[RDFNode] + ) + + private def initialState: ParserState = + ParserState(Map(), Map(), List()) + + type ShaclParser[A] = StateT[RDFParser, ParserState, A] + + private def getPendingNodes: ShaclParser[List[RDFNode]] = for { + s <- StateT.get[RDFParser, ParserState] + } yield s.pendingNodes + + private def ok_s[A](x: A): ShaclParser[A] = + StateT.liftF(ok(x)) + + private def firstOf_s[A](ps: ShaclParser[A]*): ShaclParser[A] = { + def comb(rest: ShaclParser[A], p: ShaclParser[A]): ShaclParser[A] = + p orElse rest + + val zero: ShaclParser[A] = fromRDFParser(parseFail("firstOf: none of the parsers succeeded")) + ps.foldLeft(zero)(comb) + } + + private def removePendingNode: ShaclParser[Option[RDFNode]] = for { + ns <- getPendingNodes + r <- ns match { + case Nil => ok_s(none) + case n :: rest => { + for { + _ <- StateT.modify[RDFParser, ParserState](_.copy(pendingNodes = rest)) + } yield n.some + } + } + } yield r + + private def addPendingNode(n: RDFNode): ShaclParser[Unit] = { + StateT.modify(s => s.copy(pendingNodes = n :: s.pendingNodes)) + } + + private def addPendingNodes(ns: List[RDFNode]): ShaclParser[Unit] = { + StateT.modify(s => s.copy(pendingNodes = ns ++ s.pendingNodes)) + } + + private def addShapesMap(n: RefNode, shape: Shape): ShaclParser[Unit] = { + StateT.modify(s => s.copy(parsedShapes = s.parsedShapes.updated(n, shape))) + } + + private def addParsedPropertyGroups(n: RefNode, pg: PropertyGroup): ShaclParser[Unit] = { + StateT.modify(s => s.copy(parsedPropertyGroups = s.parsedPropertyGroups.updated(n, pg))) + } + + private def getShapesMap: ShaclParser[ShapesMap] = for { + s <- StateT.get[RDFParser, ParserState] + } yield s.parsedShapes + + private def getParsedPropertyGroups: ShaclParser[PropertyGroups] = for { + s <- StateT.get[RDFParser, ParserState] + } yield s.parsedPropertyGroups + + private def getRDF_s: ShaclParser[RDFReader] = + 
StateT.liftF(getRDF) + + private def getNode_s: ShaclParser[RDFNode] = + StateT.liftF(getNode) + + private def io2s[A](e: IO[A]): ShaclParser[A] = + StateT.liftF(liftIO(e)) + + private def fromStream[A](e: Stream[IO, A]): ShaclParser[LazyList[A]] = + StateT.liftF(liftIO(e.compile.to(LazyList))) + + private def fromEitherS[A](e: Either[String, A]): ShaclParser[A] = + StateT.liftF(fromEither(e.leftMap(mkErr(_)))) + + case class ShaclParserError(e: String) extends RuntimeException(e) + + private def mkErr(e: String): ShaclParserError = ShaclParserError(e) + + /*private def fromEitherShaclParser[A](e: Either[Err,A]): ShaclParser[A] = + StateT.liftF(fromEither(e)) */ + + private def fromRDFParser[A](p: RDFParser[A]): ShaclParser[A] = + StateT.liftF(p) + + private def withNode_s[A](node: RDFNode, p: ShaclParser[A]): ShaclParser[A] = + StateT.applyF { + val ff: (ParserState => RDFParser[(ParserState, A)]) => (ParserState => RDFParser[(ParserState, A)]) = + f => (ps => withNode(node, f(ps))) + p.runF.map(ff) + } + + /*private def showStatus(msg: String): ShaclParser[Unit] = for { + ps <- StateT.get[RDFParser,ParserState] +} yield { + println(s""" + |$msg + |Parser state: PendingNodes: ${ps.pendingNodes.map(_.toString).mkString(",")} + |parsedShapesMap: ${ps.parsedShapes.toString} + |parsedPropertyGroups: ${ps.parsedPropertyGroups.keySet.map(_.toString).mkString(",")}""".stripMargin) + () +}*/ + + private def anyOf_s[A](ps: ShaclParser[A]*): ShaclParser[Seq[A]] = { + def comb(rest: ShaclParser[Seq[A]], p: ShaclParser[A]): ShaclParser[Seq[A]] = for { + maybe <- p.map(_.some) orElse ok_s(none[A]) + rs <- rest + } yield maybe match { + case None => rs + case Some(x) => x +: rs + } + val zero: ShaclParser[Seq[A]] = ok_s(Seq()) + ps.foldLeft(zero)(comb) + } + + /* private def tryGetShacl(rdf: RDFBuilder, + resolveImports: Boolean): Try[Schema] = + getShacl(rdf, resolveImports).fold( + str => Failure(new Exception(str)), + Success(_)) + */ + + private def 
getShaclFromRDFReader(rdf: RDFReader): ShaclParser[Schema] = { + for { + sm <- shapesMap + imports <- parseImports + entailments <- parseEntailments + parsedPropertyGroups <- getParsedPropertyGroups + parsedShapeMap <- getShapesMap + pm <- io2s(rdf.getPrefixMap) + } yield Schema( + pm = pm, + imports = imports, + entailments = entailments, + shapesMap = parsedShapeMap, + propertyGroups = parsedPropertyGroups.toMap + ) + } + + /** Parses RDF content and obtains a SHACL Schema and a PrefixMap + */ + def getShacl(rdf: RDFBuilder, resolveImports: Boolean = true): IO[Schema] = for { + rdfReader <- + if (resolveImports) rdf.extendImports + else IO.pure(rdf) + schema <- getShaclReader(rdfReader) + } yield schema + + def runShaclParser[A](parser: ShaclParser[A], rdf: RDFReader): IO[A] = + for { + eitherValue <- parser.run(initialState).value.run(Config(initialNode, rdf)) + value <- eitherValue.fold( + err => IO.raiseError[A](err), + pair => { + val (_, v) = pair + v.pure[IO] + } + ) + + } yield value + + def getShaclReader(rdf: RDFReader): IO[Schema] = + runShaclParser(getShaclFromRDFReader(rdf), rdf) + + private def shapesMap: ShaclParser[Unit] = + for { + rdf <- getRDF_s + nodeShapes <- fromStream[RDFNode](rdf.subjectsWithType(`sh:NodeShape`)) + propertyShapes <- fromStream[RDFNode](rdf.subjectsWithType(`sh:PropertyShape`)) + shapes <- fromStream[RDFNode](rdf.subjectsWithType(`sh:Shape`)) + objectsPropertyShapes <- fromStream[RDFNode](rdf.subjectsWithProperty(`sh:property`)) + allShapes = nodeShapes ++ propertyShapes ++ shapes ++ objectsPropertyShapes + _ <- addPendingNodes(allShapes.toList) + // _ <- showStatus("shapesMap after addPending") + sm <- parseShapesMap + } yield () + + private def parseShapesMap: ShaclParser[Unit] = for { + maybeNode <- removePendingNode + // _ <- { println(s"Pending node to parse: $maybeNode"); ok_s(()) } + r <- maybeNode match { + case None => getShapesMap + case Some(n) => + for { + s <- withNode_s(n, shape) + // _ <- 
showStatus(s"parseShapeMaps after checking shape of ${n.toString}") + sm <- parseShapesMap + } yield sm + } + } yield () + + def initialNode: RDFNode = IRI("http://internal/node") + + /*private def parseShapesMap: ShaclParser[ShapesMap] = for { + pendingNodes <- getPendingNodes + r <- pendingNodes.size match { + case 0 => getShapesMap + case _ => withNode + } +} yield r + */ + + def shape: ShaclParser[RefNode] = for { + n <- getNode_s + shapeRef = RefNode(n) + parsedShapes <- getShapesMap + v <- + if (parsedShapes contains shapeRef) { + ok_s(shapeRef) + } else { + for { + shapeRef <- firstOf_s(nodeShape, propertyShape) + } yield { + // parsedShapes(shapeRef) = newShape + shapeRef + } + } + } yield v + + private def parseEntailments: ShaclParser[List[IRI]] = + for { + rdf <- getRDF_s + ts <- fromStream[RDFTriple](rdf.triplesWithPredicate(`sh:entailment`)) + iris <- fromEitherS(sequence(ts.map(_.obj).toList.map(_.toIRI))) + } yield iris + + private def parseImports: ShaclParser[List[IRI]] = + for { + rdf <- getRDF_s + ts <- fromStream[RDFTriple](rdf.triplesWithPredicate(`owl:imports`)) + os = ts.map(_.obj).toList + iris <- fromEitherS(sequence(os.map(_.toIRI))) +// os <- fromEitherShaclParser(sequence(ts.map(_.obj).toList.map(_.toIRI))) + } yield iris + + /* private def mkId(n: RDFNode): Option[IRI] = n match { + case iri: IRI => Some(iri) + case _ => None + } */ + + private def nodeShape: ShaclParser[RefNode] = for { + n <- getNode_s + rdf <- getRDF_s + types <- fromRDFParser(rdfTypes) + _ <- fromRDFParser( + failIf(types.contains(`sh:PropertyShape`), "Node shapes must not have rdf:type sh:PropertyShape") + ) + targets <- fromRDFParser(targets) + propertyShapes <- propertyShapes + components <- components + closed <- fromRDFParser(booleanFromPredicateOptional(`sh:closed`)) + deactivated <- fromRDFParser(booleanFromPredicateOptional(`sh:deactivated`)) + message <- fromRDFParser(parseMessage) + name <- fromRDFParser(parseMessage) + description <- 
fromRDFParser(parseMessage) + group <- parsePropertyGroup + order <- fromRDFParser(parseOrder) + severity <- fromRDFParser(parseSeverity) + ignoredNodes <- fromRDFParser(rdfListForPredicateOptional(`sh:ignoredProperties`)) + ignoredIRIs <- fromEitherS(nodes2iris(ignoredNodes)) + classes <- fromRDFParser(objectsFromPredicate(`sh:class`)) + shape = NodeShape( + id = n, + components = components.toList, + targets = targets, + propertyShapes = propertyShapes, + closed = closed.getOrElse(false), + ignoredProperties = ignoredIRIs, + deactivated = deactivated.getOrElse(false), + message = message, + severity = severity, + name = name, + description = description, + group = group, + order = order, + sourceIRI = rdf.sourceIRI + ) + ref = RefNode(n) + _ <- addShapesMap(ref, shape) + // _ <- { println(s"NodeShape parsed for node ${n.toString}: ${shape.toString}"); ok_s(())} + // _ <- showStatus(s"After nodeShape of ${n.toString}") + } yield ref + + private def parsePropertyGroup: ShaclParser[Option[RefNode]] = for { + maybeGroup <- fromRDFParser(objectFromPredicateOptional(`sh:group`)) + group <- maybeGroup match { + case None => ok_s(none) + case Some(groupNode) => { + val ref = RefNode(groupNode) + for { + parsedPropGroups <- getParsedPropertyGroups + v <- parsedPropGroups.get(ref) match { + case Some(pg) => ok_s(Some(ref)) + case None => + for { + labels <- fromRDFParser(objectsFromPredicate(`rdfs:label`)) + order <- fromRDFParser(parseOrder) + pg = PropertyGroup(order, labels) + _ <- addParsedPropertyGroups(ref, pg) + } yield Some(ref) + } + } yield v + } + } + } yield group + + private def parseOrder: RDFParser[Option[DecimalLiteral]] = for { + maybeOrder <- decimalLiteralFromPredicateOptional(`sh:order`) + } yield maybeOrder + + private def parseSeverity: RDFParser[Option[Severity]] = for { + maybeIri <- iriFromPredicateOptional(`sh:severity`) + } yield maybeIri match { + case Some(`sh:Violation`) => Some(ViolationSeverity) + case Some(`sh:Warning`) => 
Some(WarningSeverity) + case Some(`sh:Info`) => Some(InfoSeverity) + case Some(iri) => Some(GenericSeverity(iri)) + case None => None + } + + private def parseMessage: RDFParser[MessageMap] = for { + nodes <- objectsFromPredicate(`sh:message`) + map <- cnvMessages(nodes) + } yield map + + private def cnvMessages(ns: Set[RDFNode]): RDFParser[MessageMap] = + fromEither(MessageMap.fromRDFNodes(ns.toList).leftMap(mkErr(_))) + + private def propertyShape: ShaclParser[RefNode] = for { + rdf <- getRDF_s + n <- getNode_s + types <- fromRDFParser(rdfTypes) + _ <- fromRDFParser(failIf(types.contains(`sh:NodeShape`), "Property shapes must not have rdf:type sh:NodeShape")) + targets <- fromRDFParser(targets) + nodePath <- fromRDFParser(objectFromPredicate(`sh:path`)) + path <- fromRDFParser(withNode(nodePath, parsePath)) + propertyShapes <- propertyShapes + // _ <- { println(s"Components of ${n}"); ok_s(())} + components <- components + closed <- fromRDFParser(booleanFromPredicateOptional(`sh:closed`)) + ignoredNodes <- fromRDFParser(rdfListForPredicateOptional(`sh:ignoredProperties`)) + deactivated <- fromRDFParser(booleanFromPredicateOptional(`sh:deactivated`)) + message <- fromRDFParser(parseMessage) + severity <- fromRDFParser(parseSeverity) + name <- fromRDFParser(parseMessage) + description <- fromRDFParser(parseMessage) + group <- parsePropertyGroup + order <- fromRDFParser(parseOrder) + ignoredIRIs <- fromEitherS(nodes2iris(ignoredNodes)) + ps = PropertyShape( + id = n, + path = path, + components = components.toList, + targets = targets, + propertyShapes = propertyShapes, + closed = closed.getOrElse(false), + ignoredProperties = ignoredIRIs, + deactivated = deactivated.getOrElse(false), + message = message, + severity = severity, + name = name, + description = description, + order = order, + group = group, + sourceIRI = rdf.sourceIRI, + annotations = + List() // TODO: Annotations should contain the values for other predicates associated with a given node + ) + ref = 
RefNode(n) + // _ <- { println(s"Property shape: ${n}: Components: ${components.toString}"); ok_s(())} + _ <- addShapesMap(ref, ps) + } yield ref + + private def targets: RDFParser[Seq[Target]] = + combineAll(targetNodes, targetClasses, implicitTargetClass, targetSubjectsOf, targetObjectsOf) + + private def targetNodes: RDFParser[Seq[Target]] = + for { + ns <- objectsFromPredicate(`sh:targetNode`) + vs <- fromEither(sequenceEither(ns.toList.map(mkTargetNode))) + } yield vs + + private def targetClasses: RDFParser[Seq[Target]] = + for { + ns <- objectsFromPredicate(`sh:targetClass`) + vs <- fromEither(sequenceEither(ns.toList.map(mkTargetClass))) + } yield vs + + private def implicitTargetClass: RDFParser[Seq[Target]] = + for { + rdf <- getRDF + n <- getNode + ts <- fromRDFStream(rdf.triplesWithSubjectPredicate(n, `rdf:type`)) + shapeTypes = ts.map(_.obj) + rdfs_Class = rdfs + "Class" + r <- fromEither( + if (shapeTypes.contains(rdfs_Class)) + mkTargetClass(n).map(Seq(_)) + else + Right(Seq()) + ) + } yield r + + private def targetSubjectsOf: RDFParser[Seq[Target]] = + for { + ns <- objectsFromPredicate(`sh:targetSubjectsOf`) + vs <- fromEither(sequenceEither(ns.toList.map(mkTargetSubjectsOf))) + } yield vs + + private def targetObjectsOf: RDFParser[Seq[Target]] = + for { + ns <- objectsFromPredicate(`sh:targetObjectsOf`) + vs <- fromEither(sequenceEither(ns.toList.map(mkTargetObjectsOf))) + } yield vs + + private def mkTargetNode(n: RDFNode): Either[Err, TargetNode] = + Right(TargetNode(n)) + + private def mkTargetClass(n: RDFNode): Either[Err, TargetClass] = + Right(TargetClass(n)) + + private def mkTargetSubjectsOf(n: RDFNode): Either[Err, TargetSubjectsOf] = n match { + case i: IRI => Right(TargetSubjectsOf(i)) + case _ => Left(mkErr(s"targetSubjectsOf requires an IRI. 
Obtained $n")) + } + + private def mkTargetObjectsOf(n: RDFNode): Either[Err, TargetObjectsOf] = n match { + case i: IRI => Right(TargetObjectsOf(i)) + case _ => Left(mkErr(s"targetObjectsOf requires an IRI. Obtained $n")) + } + + private def propertyShapes: ShaclParser[List[RefNode]] = + for { + ps <- fromRDFParser(objectsFromPredicate(`sh:property`)) + vs <- ps.toList.map(p => withNode_s(p, propertyShapeRef)).sequence + } yield vs + + private def propertyShapeRef: ShaclParser[RefNode] = for { + n <- getNode_s + _ <- addPendingNode(n) + } yield { + RefNode(n) + } + + private def parsePath: RDFParser[SHACLPath] = for { + n <- getNode + v <- n match { + case iri: IRI => ok(PredicatePath(iri)) + case bnode: BNode => + someOf( + oneOrMorePath, + zeroOrMorePath, + zeroOrOnePath, + alternativePath, + sequencePath, + inversePath + ) + case _ => parseFail(s"Unsupported value $n for path") + } + } yield v + + private def inversePath: RDFParser[SHACLPath] = for { + pathNode <- objectFromPredicate(`sh:inversePath`) + path <- withNode(pathNode, parsePath) + } yield InversePath(path) + + private def oneOrMorePath: RDFParser[SHACLPath] = for { + pathNode <- objectFromPredicate(`sh:oneOrMorePath`) + path <- withNode(pathNode, parsePath) + } yield OneOrMorePath(path) + + private def zeroOrMorePath: RDFParser[SHACLPath] = for { + pathNode <- objectFromPredicate(`sh:zeroOrMorePath`) + path <- withNode(pathNode, parsePath) + } yield ZeroOrMorePath(path) + + private def zeroOrOnePath: RDFParser[SHACLPath] = for { + pathNode <- objectFromPredicate(`sh:zeroOrOnePath`) + path <- withNode(pathNode, parsePath) + } yield ZeroOrOnePath(path) + + private def alternativePath: RDFParser[SHACLPath] = for { + pathNode <- objectFromPredicate(`sh:alternativePath`) + pathNodes <- withNode(pathNode, rdfList) + paths <- group(parsePath, pathNodes) + } yield AlternativePath(paths) + + private def sequencePath: RDFParser[SHACLPath] = for { + pathNodes <- rdfList + paths <- group(parsePath, pathNodes) + 
} yield { + SequencePath(paths) + } + + private def components: ShaclParser[Seq[Component]] = for { + n <- fromRDFParser(getNode) + // _ <- {println(s"Components for node: ${n}"); ok_s(())} + cs1 <- fromRDFParser( + anyOf(pattern, languageIn, uniqueLang, equals, disjoint, lessThan, lessThanOrEquals, hasValue, in) + ) + // _ <- {println(s"First round of components: ${cs1}"); ok_s(())} + cs2 <- fromRDFParser( + anyOfLs_s( + classComponent, + datatype, + nodeKind, + minCount, + maxCount, + minExclusive, + maxExclusive, + minInclusive, + maxInclusive, + minLength, + maxLength + ) + ) + // _ <- {println(s"2nd round of components: ${cs2}"); ok_s(())} + cs3 <- anyOf_s(qualifiedValueShape, or, and, not, xone, nodeComponent) + } yield { + val cs = cs1 ++ cs2 ++ cs3.toSeq + // println(s"Components: $cs") + cs + } + + private def anyOfLs_s[A](ps: RDFParser[List[A]]*): RDFParser[Seq[A]] = { + def comb(rest: RDFParser[Seq[A]], p: RDFParser[List[A]]): RDFParser[Seq[A]] = { + p.biflatMap( + _ => rest, + xs => + for { + rs <- rest + } yield xs ++ rs + ) + } + val zero: RDFParser[Seq[A]] = ok(Seq()) + val vs = ps.foldLeft(zero)(comb) + vs + } + + private def classComponent: RDFParser[List[Component]] = for { + cs <- { + parsePredicateList(`sh:class`, ClassComponent) + } + } yield { + cs + } + + private def datatype: RDFParser[List[Component]] = + parsePredicateIRIList(`sh:datatype`, Datatype) + + private def minInclusive: RDFParser[List[Component]] = + parsePredicateLiteralList(`sh:minInclusive`, MinInclusive) + + private def maxInclusive: RDFParser[List[Component]] = parsePredicateLiteralList(`sh:maxInclusive`, MaxInclusive) + + private def minExclusive: RDFParser[List[Component]] = parsePredicateLiteralList(`sh:minExclusive`, MinExclusive) + + private def maxExclusive: RDFParser[List[Component]] = parsePredicateLiteralList(`sh:maxExclusive`, MaxExclusive) + + private def minLength: RDFParser[List[Component]] = parsePredicateIntList(`sh:minLength`, MinLength) + + private def 
maxLength: RDFParser[List[Component]] = parsePredicateIntList(`sh:maxLength`, MaxLength) + + private def pattern: RDFParser[Component] = for { + pat <- stringFromPredicate(`sh:pattern`) + flags <- stringFromPredicateOptional(`sh:flags`) + } yield Pattern(pat, flags) + + private def languageIn: RDFParser[Component] = { + def cnv(node: RDFNode): RDFParser[String] = node match { + case StringLiteral(str) => ok(str) + case _ => parseFail(s"Expected to be a string literal but got $node") + } + for { + nodes <- rdfListForPredicate(`sh:languageIn`) + ls <- nodes.map(cnv).sequence + } yield LanguageIn(ls) + /* for { + rs <- rdfListForPredicate(`sh:languageIn`) + vs: List[RDFParser[String]] = rs.map(n => n match { + case StringLiteral(str) => ok(str) + case _ => parseFail(s"Expected to be a string literal but got $n") + }) + ls <- vs.sequence + } yield LanguageIn(ls) */ + } + + private def uniqueLang: RDFParser[Component] = for { + b <- booleanFromPredicate(`sh:uniqueLang`) + } yield UniqueLang(b) + + private def equals = parsePredicateComparison(`sh:equals`, Equals) + + private def disjoint = parsePredicateComparison(`sh:disjoint`, Disjoint) + + private def lessThan = parsePredicateComparison(`sh:lessThan`, LessThan) + + private def lessThanOrEquals = parsePredicateComparison(`sh:lessThanOrEquals`, LessThanOrEquals) + + private def parsePredicateComparison(pred: IRI, mkComp: IRI => Component): RDFParser[Component] = for { + p <- iriFromPredicate(pred) + } yield mkComp(p) + + private def or: ShaclParser[Component] = for { + shapeNodes <- fromRDFParser(rdfListForPredicate(`sh:or`)) + shapes <- mapShaclParser(shapeNodes.toList, shapeRefConst) + } yield Or(shapes) + + private def and: ShaclParser[Component] = for { + nodes <- fromRDFParser(rdfListForPredicate(`sh:and`)) + shapes <- mapShaclParser(nodes, shapeRefConst) + } yield And(shapes) + + private def xone: ShaclParser[Component] = for { + nodes <- fromRDFParser(rdfListForPredicate(`sh:xone`)) + shapes <- 
mapShaclParser(nodes, shapeRefConst) + } yield Xone(shapes) + + // TODO: Check if this must take into account that not is optional... + private def not: ShaclParser[Component] = for { + shapeNode <- fromRDFParser(objectFromPredicate(`sh:not`)) + sref <- withNode_s(shapeNode, shapeRef) + } yield Not(sref) + + private def nodeComponent: ShaclParser[Component] = + for { + nodeShape <- fromRDFParser(objectFromPredicate(`sh:node`)) + sref <- withNode_s(nodeShape, shapeRef) + } yield NodeComponent(sref) + + private def qualifiedValueShape: ShaclParser[Component] = for { + obj <- fromRDFParser(objectFromPredicate(`sh:qualifiedValueShape`)) + sref <- withNode_s(obj, shapeRef) + min <- fromRDFParser(optional(integerLiteralForPredicate(`sh:qualifiedMinCount`))) + max <- fromRDFParser(optional(integerLiteralForPredicate(`sh:qualifiedMaxCount`))) + disjoint <- fromRDFParser(booleanFromPredicateOptional(`sh:qualifiedValueShapesDisjoint`)) + } yield QualifiedValueShape(sref, min, max, disjoint) + + private def shapeRef: ShaclParser[RefNode] = for { + n <- getNode_s + _ <- addPendingNode(n) + } yield RefNode(n) + + private def shapeRefConst(sref: RDFNode): ShaclParser[RefNode] = + withNode_s(sref, shapeRef) + + def mapShaclParser[A, B](ls: List[A], p: A => ShaclParser[B]): ShaclParser[List[B]] = { + ls.map(v => p(v)).sequence[ShaclParser, B] + } + + private def minCount: RDFParser[List[Component]] = parsePredicateIntList(`sh:minCount`, MinCount) + private def maxCount: RDFParser[List[Component]] = parsePredicateIntList(`sh:maxCount`, MaxCount) + + private def hasValue: RDFParser[Component] = + for { + o <- objectFromPredicate(`sh:hasValue`) + v <- fromEither(node2Value(o).leftMap(mkErr(_))) + } yield HasValue(v) + + private def in: RDFParser[Component] = + for { + ns <- rdfListForPredicate(`sh:in`) + vs <- fromEither(convert2Values(ns.map(node2Value(_))).leftMap(mkErr(_))) + } yield In(vs) + + private def node2Value(n: RDFNode): Either[String, Value] = { + n match { + case i: IRI 
=> Right(IRIValue(i)) + case l: Literal => Right(LiteralValue(l)) + case _ => Left(s"Element $n must be a IRI or a Literal to be part of sh:in") + } + } + + private def convert2Values[A](cs: List[Either[String, A]]): Either[String, List[A]] = { + if (cs.isEmpty) + Left("The list of values associated with sh:in must not be empty") + else { + sequenceEither(cs) + } + } + + private def nodeKind: RDFParser[List[Component]] = + for { + os <- objectsFromPredicate(`sh:nodeKind`) + nk <- fromEither(parseNodeKind(os)) + } yield List(nk) + + private def parseNodeKind(os: Set[RDFNode]): Either[Err, Component] = { + os.size match { + case 0 => Left(mkErr("no iriObjects of nodeKind property")) + case 1 => { + os.head match { + case nk: IRI => + nk match { + case `sh:IRI` => Right(NodeKind(IRIKind)) + case `sh:BlankNode` => Right(NodeKind(BlankNodeKind)) + case `sh:Literal` => Right(NodeKind(LiteralKind)) + case `sh:BlankNodeOrLiteral` => Right(NodeKind(BlankNodeOrLiteral)) + case `sh:BlankNodeOrIRI` => Right(NodeKind(BlankNodeOrIRI)) + case `sh:IRIOrLiteral` => Right(NodeKind(IRIOrLiteral)) + case x => { + logger.error(s"incorrect value of nodeKind property $x") + Left(mkErr(s"incorrect value of nodeKind property $x")) + } + } + case x => { + logger.error(s"incorrect value of nodeKind property $x") + Left(mkErr(s"incorrect value of nodeKind property $x")) + } + } + } + case n => Left(mkErr(s"iriObjects of nodeKind property > 1. 
$os")) + } + } + +// private def noTarget: Seq[Target] = Seq() +// private def noPropertyShapes: Seq[PropertyShape] = Seq() + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/converter/Shacl2RDF.scala b/modules/shacl/src/main/scala/es/weso/shacl/converter/Shacl2RDF.scala index f50d4ed..5da86f3 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/converter/Shacl2RDF.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/converter/Shacl2RDF.scala @@ -1,243 +1,249 @@ -package es.weso.shacl.converter - -//import scala.util._ -//import cats.data._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.nodes._ -import es.weso.shacl.SHACLPrefixes._ -import es.weso.rdf.PREFIXES._ -import es.weso.rdf._ -import es.weso.rdf.saver.RDFSaver -import es.weso.shacl._ -import es.weso.shacl.report.Severity -import cats.effect.IO - -case class Shacl2RDF() extends RDFSaver with LazyLogging { - - def serialize(shacl: Schema, - format: String, - base: Option[IRI], - builder: RDFBuilder): IO[String] = for { - rdf <- toRDF(shacl, builder) - str <- rdf.serialize(format, base) - } yield str - - def toRDF(shacl: Schema, initial: RDFBuilder): IO[RDFBuilder] = { - val result = schema(shacl).run(initial) - result.map(_._1) - } - - private def schema(shacl: Schema): RDFSaver[Unit] = { - for { - _ <- addPrefix("sh", sh) - _ <- addPrefix("xsd", xsd) - _ <- addPrefix("rdf", rdf) - _ <- addPrefix("rdfs", rdfs) - _ <- addPrefixMap(shacl.pm) - _ <- sequence(shacl.shapes.toList.map(shape(_))) - _ <- sequence(shacl.propertyGroups.toList.map(propertyGroup)) - } yield () - } - - private def shape(shape: Shape): RDFSaver[RDFNode] = shape match { - case ns: NodeShape => nodeShape(ns) - case ps: PropertyShape => propertyShape(ps) - } - - private def propertyGroup(pair: (RefNode, PropertyGroup)): RDFSaver[RDFNode] = { - val (ref,pg) = pair - val node = ref.id - for { - _ <- order(node,pg.order) - _ <- labels(node, pg.label) - } yield node - } - - private def 
labels(node: RDFNode, labels: Set[RDFNode]): RDFSaver[Unit] = - sequence( - labels.toList.map(lbl => addTriple(node,`rdfs:label`,lbl)) - ).map(_ => ()) - - private def shapeRef(shape: RefNode): RDFSaver[RDFNode] = ok(shape.id) - - private def makeShapeId(v: RDFNode): RDFSaver[RDFNode] = ok(v) - - private def targets(id: RDFNode, ts: Seq[Target]): RDFSaver[Unit] = - saveList(ts.toList, target(id)) - - private def target(id: RDFNode)(t: Target): RDFSaver[Unit] = t match { - case TargetNode(node) => addTriple(id, `sh:targetNode`, node) - case TargetClass(node) => addTriple(id, `sh:targetClass`, node) - case TargetSubjectsOf(node) => addTriple(id, `sh:targetSubjectsOf`, node) - case TargetObjectsOf(node) => addTriple(id, `sh:targetObjectsOf`, node) - } - - private def propertyShapes(id: RDFNode, ts: Seq[RefNode]): RDFSaver[Unit] = - saveList(ts.toList, makePropertyShape(id)) - - private def makePropertyShape(id: RDFNode)(p: RefNode): RDFSaver[Unit] = - for { - node <- ok(p.id) // propertyShape(p) - _ <- addTriple(id, `sh:property`, node) - } yield () - - private def closed(id: RDFNode, b: Boolean): RDFSaver[Unit] = - if (b) - addTriple(id, `sh:closed`, BooleanLiteral(b)) - else ok(()) - - private def deactivated(id: RDFNode, b: Boolean): RDFSaver[Unit] = - if (b) - addTriple(id, `sh:deactivated`, BooleanLiteral(b)) - else ok(()) - - private def ignoredProperties(id: RDFNode, ignored: List[IRI]): RDFSaver[Unit] = - if (!ignored.isEmpty) { - for { - nodeList <- saveToRDFList(ignored, (iri: IRI) => ok(iri)) - _ <- addTriple(id, `sh:ignoredProperties`, nodeList) - } yield () - } else - ok(()) - - private def propertyShape(ps: PropertyShape): RDFSaver[RDFNode] = { - for { - shapeNode <- makeShapeId(ps.id) - _ <- addTriple(shapeNode, `rdf:type`, `sh:PropertyShape`) - _ <- targets(shapeNode, ps.targets) - _ <- propertyShapes(shapeNode, ps.propertyShapes) - _ <- closed(shapeNode, ps.closed) - _ <- deactivated(shapeNode, ps.deactivated) - _ <- ignoredProperties(shapeNode, 
ps.ignoredProperties) - _ <- messageMap(shapeNode, ps.message, `sh:message`) - _ <- messageMap(shapeNode, ps.message, `sh:name`) - _ <- messageMap(shapeNode, ps.message, `sh:description`) - _ <- order(shapeNode,ps.order) - _ <- group(shapeNode,ps.group) - _ <- severity(shapeNode, ps.severity) - pathNode <- makePath(ps.path) - _ <- addTriple(shapeNode, `sh:path`, pathNode) - _ <- saveList(ps.components, component(shapeNode)) - _ <- saveList(ps.annotations,annotation(shapeNode)) - } yield (shapeNode) - } - - private def nodeShape(n: NodeShape): RDFSaver[RDFNode] = for { - shapeNode <- makeShapeId(n.id) - _ <- addTriple(shapeNode, `rdf:type`, `sh:NodeShape`) - _ <- targets(shapeNode, n.targets) - _ <- propertyShapes(shapeNode, n.propertyShapes) - _ <- closed(shapeNode, n.closed) - _ <- deactivated(shapeNode, n.deactivated) - _ <- ignoredProperties(shapeNode, n.ignoredProperties) - _ <- saveList(n.components, component(shapeNode)) - _ <- messageMap(shapeNode, n.message, `sh:message`) - _ <- messageMap(shapeNode, n.name, `sh:name`) - _ <- messageMap(shapeNode, n.name, `sh:description`) - _ <- severity(shapeNode, n.severity) - _ <- order(shapeNode,n.order) - _ <- group(shapeNode,n.group) - } yield shapeNode - - private def order(n: RDFNode, maybeValue: Option[DecimalLiteral]): RDFSaver[Unit] = - maybeValue match { - case None => ok(()) - case Some(value) => addTriple(n, `sh:order`,value) - } - - private def group(n: RDFNode, maybeValue: Option[RefNode]): RDFSaver[Unit] = - maybeValue match { - case None => ok(()) - case Some(pg) => { - addTriple(n, `sh:group`, pg.id) - } - } - - private def messageMap(n: RDFNode, message: MessageMap, pred: IRI): RDFSaver[Unit] = - sequence(message.getRDFNodes.map(addTriple(n,pred,_)) - ).map(_ => ()) - - private def severity(n: RDFNode, severity: Option[Severity]): RDFSaver[Unit] = - severity match { - case None => ok(()) - case Some(s) => addTriple(n, `sh:severity`,s.toIRI) - } - - private def 
annotation(id:RDFNode)(annotation:(IRI,RDFNode)): RDFSaver[Unit] = { - val (pred,obj) = annotation - addTriple(id,pred,obj) - } - - private def component(id: RDFNode)(c: Component): RDFSaver[Unit] = c match { - case ClassComponent(v) => addTriple(id, `sh:class`, v) - case Datatype(iri) => addTriple(id, `sh:datatype`, iri) - case NodeKind(value) => addTriple(id, `sh:nodeKind`, value.id) - case MinCount(n) => addTriple(id, `sh:minCount`, IntegerLiteral(n)) - case MaxCount(n) => addTriple(id, `sh:maxCount`, IntegerLiteral(n)) - case MinExclusive(v) => addTriple(id, `sh:minExclusive`, v) - case MinInclusive(v) => addTriple(id, `sh:minInclusive`, v) - case MaxExclusive(v) => addTriple(id, `sh:maxExclusive`, v) - case MaxInclusive(v) => addTriple(id, `sh:maxInclusive`, v) - case MinLength(n) => addTriple(id, `sh:minLength`, IntegerLiteral(n)) - case MaxLength(n) => addTriple(id, `sh:maxLength`, IntegerLiteral(n)) - case Pattern(p, flags) => addTriple(id, `sh:pattern`, StringLiteral(p)) >> - (flags match { - case Some(f) => addTriple(id, `sh:flags`, StringLiteral(f)) - case None => ok(()) - }) - case UniqueLang(b) => addTriple(id, `sh:uniqueLang`, BooleanLiteral(b)) - case LanguageIn(langs) => for { - ls <- saveToRDFList(langs, (lang: String) => ok(StringLiteral(lang))) - _ <- addTriple(id, `sh:languageIn`, ls) - } yield () - case Equals(p) => addTriple(id, `sh:equals`, p) - case Disjoint(p) => addTriple(id, `sh:disjoint`, p) - case LessThan(p) => addTriple(id, `sh:lessThan`, p) - case LessThanOrEquals(p) => addTriple(id, `sh:lessThanOrEquals`, p) - case And(shapes) => for { - ls <- saveToRDFList(shapes, shapeRef) - _ <- addTriple(id, `sh:and`, ls) - } yield () - case Or(shapes) => for { - ls <- saveToRDFList(shapes, shapeRef) - _ <- addTriple(id, `sh:or`, ls) - } yield () - case Xone(shapes) => for { - ls <- saveToRDFList(shapes, shapeRef) - _ <- addTriple(id, `sh:xone`, ls) - } yield () - case QualifiedValueShape(s, min, max, disjoint) => for { - nodeShape <- 
shapeRef(s) - _ <- addTriple(id, `sh:qualifiedValueShape`, nodeShape) - _ <- maybeAddTriple(id, `sh:qualifiedMinCount`, min.map(IntegerLiteral(_))) - _ <- maybeAddTriple(id, `sh:qualifiedMaxCount`, max.map(IntegerLiteral(_))) - _ <- maybeAddTriple(id, `sh:qualifiedValueShapesDisjoint`, disjoint.map(BooleanLiteral(_))) - } yield () - case Not(s) => for { - nodeS <- shapeRef(s) - _ <- addTriple(id, `sh:not`, nodeS) - } yield () - case Closed(b, ignoredPs) => for { - _ <- addTriple(id, `sh:closed`, BooleanLiteral(b)) - nodeList <- saveToRDFList(ignoredPs, (iri: IRI) => ok(iri)) - _ <- addTriple(id, `sh:ignoredProperties`, nodeList) - } yield () - case NodeComponent(s) => for { - nodeS <- shapeRef(s) - _ <- addTriple(id, `sh:node`, nodeS) - } yield () - case HasValue(v) => addTriple(id, `sh:hasValue`, v.rdfNode) - case In(vs) => for { - nodeLs <- saveToRDFList(vs, (v: Value) => ok(v.rdfNode)) - _ <- addTriple(id, `sh:in`, nodeLs) - } yield () - } - -} - -object Shacl2RDF { - def shacl2RDF(shacl: Schema, builder: RDFBuilder): IO[RDFBuilder] = { - new Shacl2RDF().toRDF(shacl, builder) - } -} \ No newline at end of file +package es.weso.shacl.converter + +//import scala.util._ +//import cats.data._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.nodes._ +import es.weso.shacl.SHACLPrefixes._ +import es.weso.rdf.PREFIXES._ +import es.weso.rdf._ +import es.weso.rdf.saver.RDFSaver +import es.weso.shacl._ +import es.weso.shacl.report.Severity +import cats.effect.IO + +case class Shacl2RDF() extends RDFSaver with LazyLogging { + + def serialize(shacl: Schema, format: String, base: Option[IRI], builder: RDFBuilder): IO[String] = for { + rdf <- toRDF(shacl, builder) + str <- rdf.serialize(format, base) + } yield str + + def toRDF(shacl: Schema, initial: RDFBuilder): IO[RDFBuilder] = { + val result = schema(shacl).run(initial) + result.map(_._1) + } + + private def schema(shacl: Schema): RDFSaver[Unit] = { + for { + _ <- addPrefix("sh", 
sh) + _ <- addPrefix("xsd", xsd) + _ <- addPrefix("rdf", rdf) + _ <- addPrefix("rdfs", rdfs) + _ <- addPrefixMap(shacl.pm) + _ <- sequence(shacl.shapes.toList.map(shape(_))) + _ <- sequence(shacl.propertyGroups.toList.map(propertyGroup)) + } yield () + } + + private def shape(shape: Shape): RDFSaver[RDFNode] = shape match { + case ns: NodeShape => nodeShape(ns) + case ps: PropertyShape => propertyShape(ps) + } + + private def propertyGroup(pair: (RefNode, PropertyGroup)): RDFSaver[RDFNode] = { + val (ref, pg) = pair + val node = ref.id + for { + _ <- order(node, pg.order) + _ <- labels(node, pg.label) + } yield node + } + + private def labels(node: RDFNode, labels: Set[RDFNode]): RDFSaver[Unit] = + sequence( + labels.toList.map(lbl => addTriple(node, `rdfs:label`, lbl)) + ).map(_ => ()) + + private def shapeRef(shape: RefNode): RDFSaver[RDFNode] = ok(shape.id) + + private def makeShapeId(v: RDFNode): RDFSaver[RDFNode] = ok(v) + + private def targets(id: RDFNode, ts: Seq[Target]): RDFSaver[Unit] = + saveList(ts.toList, target(id)) + + private def target(id: RDFNode)(t: Target): RDFSaver[Unit] = t match { + case TargetNode(node) => addTriple(id, `sh:targetNode`, node) + case TargetClass(node) => addTriple(id, `sh:targetClass`, node) + case TargetSubjectsOf(node) => addTriple(id, `sh:targetSubjectsOf`, node) + case TargetObjectsOf(node) => addTriple(id, `sh:targetObjectsOf`, node) + } + + private def propertyShapes(id: RDFNode, ts: Seq[RefNode]): RDFSaver[Unit] = + saveList(ts.toList, makePropertyShape(id)) + + private def makePropertyShape(id: RDFNode)(p: RefNode): RDFSaver[Unit] = + for { + node <- ok(p.id) // propertyShape(p) + _ <- addTriple(id, `sh:property`, node) + } yield () + + private def closed(id: RDFNode, b: Boolean): RDFSaver[Unit] = + if (b) + addTriple(id, `sh:closed`, BooleanLiteral(b)) + else ok(()) + + private def deactivated(id: RDFNode, b: Boolean): RDFSaver[Unit] = + if (b) + addTriple(id, `sh:deactivated`, BooleanLiteral(b)) + else ok(()) + + 
private def ignoredProperties(id: RDFNode, ignored: List[IRI]): RDFSaver[Unit] = + if (!ignored.isEmpty) { + for { + nodeList <- saveToRDFList(ignored, (iri: IRI) => ok(iri)) + _ <- addTriple(id, `sh:ignoredProperties`, nodeList) + } yield () + } else + ok(()) + + private def propertyShape(ps: PropertyShape): RDFSaver[RDFNode] = { + for { + shapeNode <- makeShapeId(ps.id) + _ <- addTriple(shapeNode, `rdf:type`, `sh:PropertyShape`) + _ <- targets(shapeNode, ps.targets) + _ <- propertyShapes(shapeNode, ps.propertyShapes) + _ <- closed(shapeNode, ps.closed) + _ <- deactivated(shapeNode, ps.deactivated) + _ <- ignoredProperties(shapeNode, ps.ignoredProperties) + _ <- messageMap(shapeNode, ps.message, `sh:message`) + _ <- messageMap(shapeNode, ps.message, `sh:name`) + _ <- messageMap(shapeNode, ps.message, `sh:description`) + _ <- order(shapeNode, ps.order) + _ <- group(shapeNode, ps.group) + _ <- severity(shapeNode, ps.severity) + pathNode <- makePath(ps.path) + _ <- addTriple(shapeNode, `sh:path`, pathNode) + _ <- saveList(ps.components, component(shapeNode)) + _ <- saveList(ps.annotations, annotation(shapeNode)) + } yield (shapeNode) + } + + private def nodeShape(n: NodeShape): RDFSaver[RDFNode] = for { + shapeNode <- makeShapeId(n.id) + _ <- addTriple(shapeNode, `rdf:type`, `sh:NodeShape`) + _ <- targets(shapeNode, n.targets) + _ <- propertyShapes(shapeNode, n.propertyShapes) + _ <- closed(shapeNode, n.closed) + _ <- deactivated(shapeNode, n.deactivated) + _ <- ignoredProperties(shapeNode, n.ignoredProperties) + _ <- saveList(n.components, component(shapeNode)) + _ <- messageMap(shapeNode, n.message, `sh:message`) + _ <- messageMap(shapeNode, n.name, `sh:name`) + _ <- messageMap(shapeNode, n.name, `sh:description`) + _ <- severity(shapeNode, n.severity) + _ <- order(shapeNode, n.order) + _ <- group(shapeNode, n.group) + } yield shapeNode + + private def order(n: RDFNode, maybeValue: Option[DecimalLiteral]): RDFSaver[Unit] = + maybeValue match { + case None => ok(()) 
+ case Some(value) => addTriple(n, `sh:order`, value) + } + + private def group(n: RDFNode, maybeValue: Option[RefNode]): RDFSaver[Unit] = + maybeValue match { + case None => ok(()) + case Some(pg) => { + addTriple(n, `sh:group`, pg.id) + } + } + + private def messageMap(n: RDFNode, message: MessageMap, pred: IRI): RDFSaver[Unit] = + sequence(message.getRDFNodes.map(addTriple(n, pred, _))).map(_ => ()) + + private def severity(n: RDFNode, severity: Option[Severity]): RDFSaver[Unit] = + severity match { + case None => ok(()) + case Some(s) => addTriple(n, `sh:severity`, s.toIRI) + } + + private def annotation(id: RDFNode)(annotation: (IRI, RDFNode)): RDFSaver[Unit] = { + val (pred, obj) = annotation + addTriple(id, pred, obj) + } + + private def component(id: RDFNode)(c: Component): RDFSaver[Unit] = c match { + case ClassComponent(v) => addTriple(id, `sh:class`, v) + case Datatype(iri) => addTriple(id, `sh:datatype`, iri) + case NodeKind(value) => addTriple(id, `sh:nodeKind`, value.id) + case MinCount(n) => addTriple(id, `sh:minCount`, IntegerLiteral(n)) + case MaxCount(n) => addTriple(id, `sh:maxCount`, IntegerLiteral(n)) + case MinExclusive(v) => addTriple(id, `sh:minExclusive`, v) + case MinInclusive(v) => addTriple(id, `sh:minInclusive`, v) + case MaxExclusive(v) => addTriple(id, `sh:maxExclusive`, v) + case MaxInclusive(v) => addTriple(id, `sh:maxInclusive`, v) + case MinLength(n) => addTriple(id, `sh:minLength`, IntegerLiteral(n)) + case MaxLength(n) => addTriple(id, `sh:maxLength`, IntegerLiteral(n)) + case Pattern(p, flags) => + addTriple(id, `sh:pattern`, StringLiteral(p)) >> + (flags match { + case Some(f) => addTriple(id, `sh:flags`, StringLiteral(f)) + case None => ok(()) + }) + case UniqueLang(b) => addTriple(id, `sh:uniqueLang`, BooleanLiteral(b)) + case LanguageIn(langs) => + for { + ls <- saveToRDFList(langs, (lang: String) => ok(StringLiteral(lang))) + _ <- addTriple(id, `sh:languageIn`, ls) + } yield () + case Equals(p) => addTriple(id, 
`sh:equals`, p) + case Disjoint(p) => addTriple(id, `sh:disjoint`, p) + case LessThan(p) => addTriple(id, `sh:lessThan`, p) + case LessThanOrEquals(p) => addTriple(id, `sh:lessThanOrEquals`, p) + case And(shapes) => + for { + ls <- saveToRDFList(shapes, shapeRef) + _ <- addTriple(id, `sh:and`, ls) + } yield () + case Or(shapes) => + for { + ls <- saveToRDFList(shapes, shapeRef) + _ <- addTriple(id, `sh:or`, ls) + } yield () + case Xone(shapes) => + for { + ls <- saveToRDFList(shapes, shapeRef) + _ <- addTriple(id, `sh:xone`, ls) + } yield () + case QualifiedValueShape(s, min, max, disjoint) => + for { + nodeShape <- shapeRef(s) + _ <- addTriple(id, `sh:qualifiedValueShape`, nodeShape) + _ <- maybeAddTriple(id, `sh:qualifiedMinCount`, min.map(IntegerLiteral(_))) + _ <- maybeAddTriple(id, `sh:qualifiedMaxCount`, max.map(IntegerLiteral(_))) + _ <- maybeAddTriple(id, `sh:qualifiedValueShapesDisjoint`, disjoint.map(BooleanLiteral(_))) + } yield () + case Not(s) => + for { + nodeS <- shapeRef(s) + _ <- addTriple(id, `sh:not`, nodeS) + } yield () + case Closed(b, ignoredPs) => + for { + _ <- addTriple(id, `sh:closed`, BooleanLiteral(b)) + nodeList <- saveToRDFList(ignoredPs, (iri: IRI) => ok(iri)) + _ <- addTriple(id, `sh:ignoredProperties`, nodeList) + } yield () + case NodeComponent(s) => + for { + nodeS <- shapeRef(s) + _ <- addTriple(id, `sh:node`, nodeS) + } yield () + case HasValue(v) => addTriple(id, `sh:hasValue`, v.rdfNode) + case In(vs) => + for { + nodeLs <- saveToRDFList(vs, (v: Value) => ok(v.rdfNode)) + _ <- addTriple(id, `sh:in`, nodeLs) + } yield () + } + +} + +object Shacl2RDF { + def shacl2RDF(shacl: Schema, builder: RDFBuilder): IO[RDFBuilder] = { + new Shacl2RDF().toRDF(shacl, builder) + } +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/report/Severity.scala b/modules/shacl/src/main/scala/es/weso/shacl/report/Severity.scala index d535520..34b182b 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/report/Severity.scala +++ 
b/modules/shacl/src/main/scala/es/weso/shacl/report/Severity.scala @@ -1,24 +1,24 @@ -package es.weso.shacl.report - -import es.weso.rdf.nodes.IRI -import es.weso.shacl.SHACLPrefixes._ - -sealed abstract class Severity { - def toIRI: IRI -} -case object ViolationSeverity extends Severity { - override def toIRI: IRI = `sh:Violation` -} -case object WarningSeverity extends Severity { - override def toIRI: IRI = `sh:Warning` -} -case object InfoSeverity extends Severity { - override def toIRI: IRI = `sh:Info` -} -case class GenericSeverity(iri: IRI) extends Severity { - override def toIRI: IRI = iri -} - -object Severity { - val defaultSeverity = ViolationSeverity -} +package es.weso.shacl.report + +import es.weso.rdf.nodes.IRI +import es.weso.shacl.SHACLPrefixes._ + +sealed abstract class Severity { + def toIRI: IRI +} +case object ViolationSeverity extends Severity { + override def toIRI: IRI = `sh:Violation` +} +case object WarningSeverity extends Severity { + override def toIRI: IRI = `sh:Warning` +} +case object InfoSeverity extends Severity { + override def toIRI: IRI = `sh:Info` +} +case class GenericSeverity(iri: IRI) extends Severity { + override def toIRI: IRI = iri +} + +object Severity { + val defaultSeverity = ViolationSeverity +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport.scala b/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport.scala index a895a44..69853f1 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport.scala @@ -1,22 +1,20 @@ -package es.weso.shacl.report - -import es.weso.rdf.RDFBuilder -import es.weso.rdf.saver.RDFSaver -import cats.effect.IO - -case class ValidationReport(conforms: Boolean, - results: Seq[AbstractResult], - shapesGraphWellFormed: Boolean - ) extends RDFSaver { - - def toRDF(builder: RDFBuilder): IO[RDFBuilder] = { - ValidationReport2RDF.run(this,builder) - } - -} - 
-object ValidationReport { - def fromError(e: AbstractResult): ValidationReport = { - ValidationReport(conforms = false, results = Seq(e), true) - } -} \ No newline at end of file +package es.weso.shacl.report + +import es.weso.rdf.RDFBuilder +import es.weso.rdf.saver.RDFSaver +import cats.effect.IO + +case class ValidationReport(conforms: Boolean, results: Seq[AbstractResult], shapesGraphWellFormed: Boolean) + extends RDFSaver { + + def toRDF(builder: RDFBuilder): IO[RDFBuilder] = { + ValidationReport2RDF.run(this, builder) + } + +} + +object ValidationReport { + def fromError(e: AbstractResult): ValidationReport = { + ValidationReport(conforms = false, results = Seq(e), true) + } +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport2RDF.scala b/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport2RDF.scala index 342a5a8..0c35338 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport2RDF.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationReport2RDF.scala @@ -1,69 +1,70 @@ -package es.weso.shacl.report - -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.RDFBuilder -import es.weso.rdf.saver.RDFSaver -import es.weso.shacl.SHACLPrefixes._ -import es.weso.rdf.PREFIXES._ -import es.weso.rdf.nodes.{BooleanLiteral, RDFNode, StringLiteral} -import es.weso.shacl.LiteralValue -import cats.effect.IO - -class ValidationReport2RDF extends RDFSaver with LazyLogging { - - def toRDF(vr: ValidationReport, initial: RDFBuilder): IO[RDFBuilder] = { - val result = validationReport(vr).run(initial) - result.map(_._1) - } - - private def validationReport(vr: ValidationReport): RDFSaver[Unit] = for { - _ <- addPrefix("sh", sh) - node <- createBNode() - _ <- addTriple(node, `rdf:type`, `sh:ValidationReport`) - _ <- addTriple(node, `sh:conforms`, BooleanLiteral(vr.conforms)) - _ <- results(node, vr.results) - } yield () - - private def results(id: RDFNode, ts: Seq[AbstractResult]): 
RDFSaver[Unit] = - saveList(ts.toList, result(id)) - - private def result(id: RDFNode)(ar: AbstractResult): RDFSaver[Unit] = ar match { - case vr: ValidationResult => - for { - node <- createBNode() - _ <- addTriple(id, `sh:result`, node) - _ <- addTriple(node, `rdf:type`, `sh:ValidationResult`) - _ <- addTriple(node, `sh:resultSeverity`, vr.resultSeverity.toIRI) - _ <- addTriple(node, `sh:focusNode`, vr.focusNode) - _ <- addTriple(node, `sh:sourceConstraintComponent`, vr.sourceConstraintComponent) - _ <- addTriple(node, `sh:sourceShape`, vr.sourceShape.id) - _ <- addTripleObjects(node, `sh:value`, vr.values.toList) - _ <- addTripleObjects(node, `sh:resultMessage`, vr.messageMap.getRDFNodes) - _ <- saveList(vr.message.toList, message(node)) - _ <- vr.focusPath match { - case None => ok(()) - case Some(path) => - for { - path <- makePath(path) - _ <- addTriple(node, `sh:resultPath`, path) - } yield () - } - } yield () - case mr: MsgError => for { - node <- createBNode() - _ <- addTriple(node, `sh:resultMessage`, StringLiteral(mr.msg)) - } yield () - } - - private def message(node: RDFNode)(msg: LiteralValue): RDFSaver[Unit] = for { - _ <- addTriple(node, `sh:message`,msg.literal) - } yield () - -} - -object ValidationReport2RDF { - - def run(vr: ValidationReport, builder: RDFBuilder): IO[RDFBuilder] = - new ValidationReport2RDF().toRDF(vr,builder) - -} \ No newline at end of file +package es.weso.shacl.report + +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.RDFBuilder +import es.weso.rdf.saver.RDFSaver +import es.weso.shacl.SHACLPrefixes._ +import es.weso.rdf.PREFIXES._ +import es.weso.rdf.nodes.{BooleanLiteral, RDFNode, StringLiteral} +import es.weso.shacl.LiteralValue +import cats.effect.IO + +class ValidationReport2RDF extends RDFSaver with LazyLogging { + + def toRDF(vr: ValidationReport, initial: RDFBuilder): IO[RDFBuilder] = { + val result = validationReport(vr).run(initial) + result.map(_._1) + } + + private def validationReport(vr: 
ValidationReport): RDFSaver[Unit] = for { + _ <- addPrefix("sh", sh) + node <- createBNode() + _ <- addTriple(node, `rdf:type`, `sh:ValidationReport`) + _ <- addTriple(node, `sh:conforms`, BooleanLiteral(vr.conforms)) + _ <- results(node, vr.results) + } yield () + + private def results(id: RDFNode, ts: Seq[AbstractResult]): RDFSaver[Unit] = + saveList(ts.toList, result(id)) + + private def result(id: RDFNode)(ar: AbstractResult): RDFSaver[Unit] = ar match { + case vr: ValidationResult => + for { + node <- createBNode() + _ <- addTriple(id, `sh:result`, node) + _ <- addTriple(node, `rdf:type`, `sh:ValidationResult`) + _ <- addTriple(node, `sh:resultSeverity`, vr.resultSeverity.toIRI) + _ <- addTriple(node, `sh:focusNode`, vr.focusNode) + _ <- addTriple(node, `sh:sourceConstraintComponent`, vr.sourceConstraintComponent) + _ <- addTriple(node, `sh:sourceShape`, vr.sourceShape.id) + _ <- addTripleObjects(node, `sh:value`, vr.values.toList) + _ <- addTripleObjects(node, `sh:resultMessage`, vr.messageMap.getRDFNodes) + _ <- saveList(vr.message.toList, message(node)) + _ <- vr.focusPath match { + case None => ok(()) + case Some(path) => + for { + path <- makePath(path) + _ <- addTriple(node, `sh:resultPath`, path) + } yield () + } + } yield () + case mr: MsgError => + for { + node <- createBNode() + _ <- addTriple(node, `sh:resultMessage`, StringLiteral(mr.msg)) + } yield () + } + + private def message(node: RDFNode)(msg: LiteralValue): RDFSaver[Unit] = for { + _ <- addTriple(node, `sh:message`, msg.literal) + } yield () + +} + +object ValidationReport2RDF { + + def run(vr: ValidationReport, builder: RDFBuilder): IO[RDFBuilder] = + new ValidationReport2RDF().toRDF(vr, builder) + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationResult.scala b/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationResult.scala index 44d88d2..62d460a 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationResult.scala +++ 
b/modules/shacl/src/main/scala/es/weso/shacl/report/ValidationResult.scala @@ -1,187 +1,260 @@ -package es.weso.shacl.report - -import es.weso.rdf.nodes._ -import es.weso.rdf.PREFIXES.sh -import es.weso.rdf.path._ -import es.weso.shacl._ -import es.weso.shacl.validator.Attempt - - -// TODO: Refactor this code creating Classes for each error? - -abstract class AbstractResult - -case class ValidationResult(focusNode: RDFNode, - resultSeverity: Severity, - sourceConstraintComponent: IRI, - focusPath: Option[SHACLPath], - sourceShape: RefNode, - values: Seq[RDFNode], - message: Seq[LiteralValue], - messageMap: MessageMap, - details: Seq[AbstractResult] - ) extends AbstractResult { - - def setSeverity(s: Severity): ValidationResult = - this.copy(resultSeverity = s) - - override def toString = s"Violation error on $focusNode: ${message.mkString(",")}" -} - -case class MsgError(msg: String) extends AbstractResult - -object ValidationResult { - - def basic(suffix: String, - focusNode: RDFNode, - attempt: Attempt, - msg: String - ) = - ValidationResult( - sourceConstraintComponent = sh + suffix, - focusNode = focusNode, - resultSeverity = attempt.severity, - sourceShape = attempt.shapeRef, - values = Seq(), - focusPath = attempt.path, - message = Seq(LiteralValue(StringLiteral(msg))), - messageMap = attempt.messageMap, - details = Seq() - ) - - def notFoundShapeRef(node: RDFNode, attempt: Attempt, msg: String) = - basic("NotFoundShapeRef", node, attempt, msg) - - def expectedPropertyShape(node: RDFNode, attempt: Attempt, msg: String) = - basic("ExpectedPropertyShape", node, attempt, msg) - - def shapesFailed(node: RDFNode, shape: Shape, ps: Set[Shape], attempt: Attempt, msg: String) = - basic("ShapesFailed", node, attempt, msg).setSeverity(InfoSeverity) - - def regexError(node: RDFNode, attempt: Attempt, msg: String) = - basic("RegEx error", node, attempt, msg) - - def noSiblingsError(focusNode: RDFNode, p: PropertyShape, msg: String, attempt: Attempt) = - 
basic("noSiblingsError", focusNode, attempt, s"No siblings found for property shape $p in schema: $msg") - - def errorNode(node: RDFNode, shape: Shape, attempt: Attempt, msg: String): ValidationResult = - basic("NodeConstraintComponent", node, attempt, msg) - - def classError(focusNode: RDFNode, cls: RDFNode, attempt: Attempt) = - basic("ClassConstraintComponent", focusNode, attempt, s"Node $focusNode doesn't belong to class $cls") - - def datatypeError(focusNode: RDFNode, datatype: RDFNode, attempt: Attempt) = - basic("DatatypeConstraintComponent", focusNode, attempt, s"Node $focusNode doesn't have dataType $datatype") - - def unsupported(focusNode: RDFNode, attempt: Attempt, msg: String) = - basic("unsupported", focusNode, attempt, "Unsupported: " + msg) - - def notNumeric(focusNode: RDFNode, attempt: Attempt) = - basic("NotNumericConstraintComponent", focusNode, attempt, s"NotNumeric violation. Expected $focusNode to be a number") - - def minExclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = - basic("MinExclusiveConstraintComponent", focusNode, attempt, s"minExclusive violation. Expected $focusNode > $n") - - def minInclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = - basic("MinInclusiveConstraintComponent", focusNode, attempt, s"minInclusive violation. Expected $focusNode >= $n") - - def maxExclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = - basic("MaxExclusiveConstraintComponent", focusNode, attempt, s"maxExclusive violation. Expected $focusNode < $n") - - def maxInclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = - basic("MaxInclusiveConstraintComponent", focusNode, attempt, s"maxInclusive violation. Expected $focusNode <= $n") - - def minLengthError(focusNode: RDFNode, attempt: Attempt, n: Int) = - basic("MinLengthConstraintComponent", focusNode, attempt, s"minLength violation. 
Expected length($focusNode) >= $n") - - def maxLengthError(focusNode: RDFNode, attempt: Attempt, n: Int) = - basic("MaxLengthConstraintComponent", focusNode, attempt, s"maxLength violation. Expected length($focusNode) <= $n") - - def patternError(focusNode: RDFNode, attempt: Attempt, p: String, flags: Option[String]) = - basic("PatternConstraintComponent", focusNode, attempt, s"pattern violation. Expected $focusNode to match '$p'${flags.getOrElse("")}") - - def uniqueLangError(focusNode: RDFNode, attempt: Attempt, path: SHACLPath, vs: Seq[RDFNode]) = - basic("UniqueLangConstraintComponent", focusNode, attempt, s"uniqueLang violation. Expected $focusNode to have a unique language for path $path with values $vs") - - def languageInError(focusNode: RDFNode, attempt: Attempt, langs: List[String]) = - basic("LanguageInConstraintComponent", focusNode, attempt, s"languageIn violation. Expected $focusNode to match 'languageIn(${langs.mkString(",")})'") - - def equalsError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = - comparisonError("EqualsConstraintComponent", focusNode, attempt, p, vs) - - def disjointError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = - comparisonError("DisjointConstraintComponent", focusNode, attempt, p, vs) - - def lessThanError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = - comparisonError("LessThanConstraintComponent", focusNode, attempt, p, vs) - - def lessThanOrEqualsError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = - comparisonError("LessThanOrEqualsConstraintComponent", focusNode, attempt, p, vs) - - def comparisonError(name: String, focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = - basic(s"${name}ConstraintComponent", focusNode, attempt, s"$name violation. 
Expected $focusNode to match $name '$p', values: $vs") - - def minCountError(focusNode: RDFNode, attempt: Attempt, minCount: Int, count: Int) = - basic("MinCountConstraintComponent", focusNode, attempt, s"MinCount violation. Expected $minCount, obtained: $count") - - def maxCountError(focusNode: RDFNode, attempt: Attempt, maxCount: Int, count: Int) = - basic("MaxCountConstraintComponent", focusNode, attempt, s"MaxCount violation. Expected $maxCount, obtained: $count") - - def iriKindError(focusNode: RDFNode, attempt: Attempt) = - basic("IriConstraintComponent", focusNode, attempt, s"Node $focusNode is not an IRI") - - def literalKindError(focusNode: RDFNode, attempt: Attempt) = - basic("LiteralConstraintComponent", focusNode, attempt, s"Node $focusNode is not a Literal") - - def bNodeKindError(focusNode: RDFNode, attempt: Attempt) = - basic("BNodeConstraintComponent", focusNode, attempt, s"Node $focusNode is not a blank node") - - def bNodeOrIRIKindError(focusNode: RDFNode, attempt: Attempt) = - basic("BNodeOrIRIConstraintComponent", focusNode, attempt, s"Node $focusNode is not a blank node or an IRI") - - def bNodeOrLiteralKindError(focusNode: RDFNode, attempt: Attempt) = - basic("BNodeOrLiteralConstraintComponent", focusNode, attempt, s"Node $focusNode is not a blank node or a Literal") - - def iriOrLiteralKindError(focusNode: RDFNode, attempt: Attempt) = - basic("IriOrLiteralConstraintComponent", focusNode, attempt, s"Node $focusNode is not a IRI or a Literal") - - def notError(focusNode: RDFNode, attempt: Attempt, shape: RefNode) = - basic("NotConstraintComponent", focusNode, attempt, s"Not violation. Expected $focusNode not to satisfy ${shape.showId}") - - def andError(focusNode: RDFNode, attempt: Attempt, shapes: List[RefNode]) = - basic("AndConstraintComponent", focusNode, attempt, s"And violation. 
Expected $focusNode to satisfy all of the shapes ${shapes.map(_.showId).mkString(",")}") - - def orError(focusNode: RDFNode, attempt: Attempt, shapes: List[RefNode]) = - basic("OrConstraintComponent", focusNode, attempt, s"Or violation. Expected $focusNode to satisfy some of the shapes ${shapes.map(_.showId).mkString(",")}") - - def xoneError(focusNode: RDFNode, attempt: Attempt, shapes: Seq[RefNode]) = - basic("XoneConstraintComponent", focusNode, attempt, s"Xone violation. Expected $focusNode to satisfy exactly one of the shapes ${shapes.map(_.showId).mkString(",")}") - - def qualifiedShapeError(focusNode: RDFNode, attempt: Attempt, value: Int, min: Option[Int], max: Option[Int]) = - basic("QualifiedShapeConstraintComponent", focusNode, attempt, s"qualified shape error. Expected $focusNode to satisfy qualifiedValueShape. Value = ${value}, min: ${min.map(_.toString).getOrElse("-")}, max: ${max.map(_.toString).getOrElse("-")}") - - def hasValueError(focusNode: RDFNode, attempt: Attempt, value: Value) = - basic("HasValueConstraintComponent", focusNode, attempt, s"HasValue error. Expected $focusNode to be $value") - - def hasValueErrorNoValue(focusNode: RDFNode, attempt: Attempt, value: Value, path: SHACLPath) = - basic("HasValueConstraintComponent", focusNode, attempt, s"HasValue error. Missing value for path $path on $focusNode. Value must be $value") - - def hasValueErrorMoreThanOne(focusNode: RDFNode, attempt: Attempt, value: Value, path: SHACLPath, n: Int) = - basic("HasValueConstraintComponent", focusNode, attempt, s"HasValue error. More than one value ($n) for path $path on $focusNode. Value must be $value") - - def inError(focusNode: RDFNode, attempt: Attempt, values: Seq[Value]) = - basic("InConstraintComponent", focusNode, attempt, s"In violation. 
Expected $focusNode to be in $values") - - def notShapeError(focusNode: RDFNode, shapeRef: RefNode, attempt: Attempt) = - basic("notShape", focusNode, attempt, s"Not failed because $focusNode has shape $shapeRef and it should not have") - - def closedError( - focusNode: RDFNode, - attempt: Attempt, - allowedProperties: List[IRI], - ignoredProperties: List[IRI], - notAllowed: List[IRI]) = - basic("ClosedConstraintComponent", focusNode, attempt, - s"closed violation. $focusNode has more properties than $allowedProperties. Extra properties found: $notAllowed, ignoredProperties: $ignoredProperties") - -} +package es.weso.shacl.report + +import es.weso.rdf.nodes._ +import es.weso.rdf.PREFIXES.sh +import es.weso.rdf.path._ +import es.weso.shacl._ +import es.weso.shacl.validator.Attempt + +// TODO: Refactor this code creating Classes for each error? + +abstract class AbstractResult + +case class ValidationResult( + focusNode: RDFNode, + resultSeverity: Severity, + sourceConstraintComponent: IRI, + focusPath: Option[SHACLPath], + sourceShape: RefNode, + values: Seq[RDFNode], + message: Seq[LiteralValue], + messageMap: MessageMap, + details: Seq[AbstractResult] +) extends AbstractResult { + + def setSeverity(s: Severity): ValidationResult = + this.copy(resultSeverity = s) + + override def toString = s"Violation error on $focusNode: ${message.mkString(",")}" +} + +case class MsgError(msg: String) extends AbstractResult + +object ValidationResult { + + def basic(suffix: String, focusNode: RDFNode, attempt: Attempt, msg: String) = + ValidationResult( + sourceConstraintComponent = sh + suffix, + focusNode = focusNode, + resultSeverity = attempt.severity, + sourceShape = attempt.shapeRef, + values = Seq(), + focusPath = attempt.path, + message = Seq(LiteralValue(StringLiteral(msg))), + messageMap = attempt.messageMap, + details = Seq() + ) + + def notFoundShapeRef(node: RDFNode, attempt: Attempt, msg: String) = + basic("NotFoundShapeRef", node, attempt, msg) + + def 
expectedPropertyShape(node: RDFNode, attempt: Attempt, msg: String) = + basic("ExpectedPropertyShape", node, attempt, msg) + + def shapesFailed(node: RDFNode, shape: Shape, ps: Set[Shape], attempt: Attempt, msg: String) = + basic("ShapesFailed", node, attempt, msg).setSeverity(InfoSeverity) + + def regexError(node: RDFNode, attempt: Attempt, msg: String) = + basic("RegEx error", node, attempt, msg) + + def noSiblingsError(focusNode: RDFNode, p: PropertyShape, msg: String, attempt: Attempt) = + basic("noSiblingsError", focusNode, attempt, s"No siblings found for property shape $p in schema: $msg") + + def errorNode(node: RDFNode, shape: Shape, attempt: Attempt, msg: String): ValidationResult = + basic("NodeConstraintComponent", node, attempt, msg) + + def classError(focusNode: RDFNode, cls: RDFNode, attempt: Attempt) = + basic("ClassConstraintComponent", focusNode, attempt, s"Node $focusNode doesn't belong to class $cls") + + def datatypeError(focusNode: RDFNode, datatype: RDFNode, attempt: Attempt) = + basic("DatatypeConstraintComponent", focusNode, attempt, s"Node $focusNode doesn't have dataType $datatype") + + def unsupported(focusNode: RDFNode, attempt: Attempt, msg: String) = + basic("unsupported", focusNode, attempt, "Unsupported: " + msg) + + def notNumeric(focusNode: RDFNode, attempt: Attempt) = + basic( + "NotNumericConstraintComponent", + focusNode, + attempt, + s"NotNumeric violation. Expected $focusNode to be a number" + ) + + def minExclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = + basic("MinExclusiveConstraintComponent", focusNode, attempt, s"minExclusive violation. Expected $focusNode > $n") + + def minInclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = + basic("MinInclusiveConstraintComponent", focusNode, attempt, s"minInclusive violation. 
Expected $focusNode >= $n") + + def maxExclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = + basic("MaxExclusiveConstraintComponent", focusNode, attempt, s"maxExclusive violation. Expected $focusNode < $n") + + def maxInclusiveError(focusNode: RDFNode, attempt: Attempt, n: RDFNode) = + basic("MaxInclusiveConstraintComponent", focusNode, attempt, s"maxInclusive violation. Expected $focusNode <= $n") + + def minLengthError(focusNode: RDFNode, attempt: Attempt, n: Int) = + basic("MinLengthConstraintComponent", focusNode, attempt, s"minLength violation. Expected length($focusNode) >= $n") + + def maxLengthError(focusNode: RDFNode, attempt: Attempt, n: Int) = + basic("MaxLengthConstraintComponent", focusNode, attempt, s"maxLength violation. Expected length($focusNode) <= $n") + + def patternError(focusNode: RDFNode, attempt: Attempt, p: String, flags: Option[String]) = + basic( + "PatternConstraintComponent", + focusNode, + attempt, + s"pattern violation. Expected $focusNode to match '$p'${flags.getOrElse("")}" + ) + + def uniqueLangError(focusNode: RDFNode, attempt: Attempt, path: SHACLPath, vs: Seq[RDFNode]) = + basic( + "UniqueLangConstraintComponent", + focusNode, + attempt, + s"uniqueLang violation. Expected $focusNode to have a unique language for path $path with values $vs" + ) + + def languageInError(focusNode: RDFNode, attempt: Attempt, langs: List[String]) = + basic( + "LanguageInConstraintComponent", + focusNode, + attempt, + s"languageIn violation. 
Expected $focusNode to match 'languageIn(${langs.mkString(",")})'" + ) + + def equalsError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = + comparisonError("EqualsConstraintComponent", focusNode, attempt, p, vs) + + def disjointError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = + comparisonError("DisjointConstraintComponent", focusNode, attempt, p, vs) + + def lessThanError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = + comparisonError("LessThanConstraintComponent", focusNode, attempt, p, vs) + + def lessThanOrEqualsError(focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = + comparisonError("LessThanOrEqualsConstraintComponent", focusNode, attempt, p, vs) + + def comparisonError(name: String, focusNode: RDFNode, attempt: Attempt, p: IRI, vs: Set[RDFNode]) = + basic( + s"${name}ConstraintComponent", + focusNode, + attempt, + s"$name violation. Expected $focusNode to match $name '$p', values: $vs" + ) + + def minCountError(focusNode: RDFNode, attempt: Attempt, minCount: Int, count: Int) = + basic( + "MinCountConstraintComponent", + focusNode, + attempt, + s"MinCount violation. Expected $minCount, obtained: $count" + ) + + def maxCountError(focusNode: RDFNode, attempt: Attempt, maxCount: Int, count: Int) = + basic( + "MaxCountConstraintComponent", + focusNode, + attempt, + s"MaxCount violation. 
Expected $maxCount, obtained: $count" + ) + + def iriKindError(focusNode: RDFNode, attempt: Attempt) = + basic("IriConstraintComponent", focusNode, attempt, s"Node $focusNode is not an IRI") + + def literalKindError(focusNode: RDFNode, attempt: Attempt) = + basic("LiteralConstraintComponent", focusNode, attempt, s"Node $focusNode is not a Literal") + + def bNodeKindError(focusNode: RDFNode, attempt: Attempt) = + basic("BNodeConstraintComponent", focusNode, attempt, s"Node $focusNode is not a blank node") + + def bNodeOrIRIKindError(focusNode: RDFNode, attempt: Attempt) = + basic("BNodeOrIRIConstraintComponent", focusNode, attempt, s"Node $focusNode is not a blank node or an IRI") + + def bNodeOrLiteralKindError(focusNode: RDFNode, attempt: Attempt) = + basic("BNodeOrLiteralConstraintComponent", focusNode, attempt, s"Node $focusNode is not a blank node or a Literal") + + def iriOrLiteralKindError(focusNode: RDFNode, attempt: Attempt) = + basic("IriOrLiteralConstraintComponent", focusNode, attempt, s"Node $focusNode is not a IRI or a Literal") + + def notError(focusNode: RDFNode, attempt: Attempt, shape: RefNode) = + basic( + "NotConstraintComponent", + focusNode, + attempt, + s"Not violation. Expected $focusNode not to satisfy ${shape.showId}" + ) + + def andError(focusNode: RDFNode, attempt: Attempt, shapes: List[RefNode]) = + basic( + "AndConstraintComponent", + focusNode, + attempt, + s"And violation. Expected $focusNode to satisfy all of the shapes ${shapes.map(_.showId).mkString(",")}" + ) + + def orError(focusNode: RDFNode, attempt: Attempt, shapes: List[RefNode]) = + basic( + "OrConstraintComponent", + focusNode, + attempt, + s"Or violation. Expected $focusNode to satisfy some of the shapes ${shapes.map(_.showId).mkString(",")}" + ) + + def xoneError(focusNode: RDFNode, attempt: Attempt, shapes: Seq[RefNode]) = + basic( + "XoneConstraintComponent", + focusNode, + attempt, + s"Xone violation. 
Expected $focusNode to satisfy exactly one of the shapes ${shapes.map(_.showId).mkString(",")}" + ) + + def qualifiedShapeError(focusNode: RDFNode, attempt: Attempt, value: Int, min: Option[Int], max: Option[Int]) = + basic( + "QualifiedShapeConstraintComponent", + focusNode, + attempt, + s"qualified shape error. Expected $focusNode to satisfy qualifiedValueShape. Value = ${value}, min: ${min + .map(_.toString) + .getOrElse("-")}, max: ${max.map(_.toString).getOrElse("-")}" + ) + + def hasValueError(focusNode: RDFNode, attempt: Attempt, value: Value) = + basic("HasValueConstraintComponent", focusNode, attempt, s"HasValue error. Expected $focusNode to be $value") + + def hasValueErrorNoValue(focusNode: RDFNode, attempt: Attempt, value: Value, path: SHACLPath) = + basic( + "HasValueConstraintComponent", + focusNode, + attempt, + s"HasValue error. Missing value for path $path on $focusNode. Value must be $value" + ) + + def hasValueErrorMoreThanOne(focusNode: RDFNode, attempt: Attempt, value: Value, path: SHACLPath, n: Int) = + basic( + "HasValueConstraintComponent", + focusNode, + attempt, + s"HasValue error. More than one value ($n) for path $path on $focusNode. Value must be $value" + ) + + def inError(focusNode: RDFNode, attempt: Attempt, values: Seq[Value]) = + basic("InConstraintComponent", focusNode, attempt, s"In violation. Expected $focusNode to be in $values") + + def notShapeError(focusNode: RDFNode, shapeRef: RefNode, attempt: Attempt) = + basic("notShape", focusNode, attempt, s"Not failed because $focusNode has shape $shapeRef and it should not have") + + def closedError( + focusNode: RDFNode, + attempt: Attempt, + allowedProperties: List[IRI], + ignoredProperties: List[IRI], + notAllowed: List[IRI] + ) = + basic( + "ClosedConstraintComponent", + focusNode, + attempt, + s"closed violation. $focusNode has more properties than $allowedProperties. 
Extra properties found: $notAllowed, ignoredProperties: $ignoredProperties" + ) + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/showShacl.scala b/modules/shacl/src/main/scala/es/weso/shacl/showShacl.scala index f72c72d..d69068e 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/showShacl.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/showShacl.scala @@ -1,26 +1,27 @@ -package es.weso.shacl -import cats._ -import es.weso.rdf.nodes._ -import es.weso.shacl.report.{AbstractResult, MsgError, ValidationResult} - -object showShacl { - - implicit def showShape: Show[Shape] = new Show[Shape] { - def show(shape: Shape): String = { - shape.id.toString // .fold("_?")(iri => iri.str) - } - } - - implicit def showError: Show[AbstractResult] = new Show[AbstractResult] { - def show(ve: AbstractResult): String = - ve match { - case vr: ValidationResult => s"Violation Error(${vr.sourceConstraintComponent}). Node(${vr.focusNode}) ${vr.message.mkString(",")}" - case m: MsgError => s"Error: ${m.msg}" - } - } - - implicit def showRDFNode: Show[RDFNode] = new Show[RDFNode] { - def show(n: RDFNode): String = n.toString - } - -} +package es.weso.shacl +import cats._ +import es.weso.rdf.nodes._ +import es.weso.shacl.report.{AbstractResult, MsgError, ValidationResult} + +object showShacl { + + implicit def showShape: Show[Shape] = new Show[Shape] { + def show(shape: Shape): String = { + shape.id.toString // .fold("_?")(iri => iri.str) + } + } + + implicit def showError: Show[AbstractResult] = new Show[AbstractResult] { + def show(ve: AbstractResult): String = + ve match { + case vr: ValidationResult => + s"Violation Error(${vr.sourceConstraintComponent}). 
Node(${vr.focusNode}) ${vr.message.mkString(",")}" + case m: MsgError => s"Error: ${m.msg}" + } + } + + implicit def showRDFNode: Show[RDFNode] = new Show[RDFNode] { + def show(n: RDFNode): String = n.toString + } + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/Attempt.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/Attempt.scala index 7e163a4..bc61535 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/Attempt.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/Attempt.scala @@ -1,20 +1,18 @@ -package es.weso.shacl.validator - -import es.weso.rdf.nodes._ -import es.weso.rdf.path.SHACLPath -import es.weso.shacl.{MessageMap, RefNode} -import es.weso.shacl.report.Severity -/** - * Represents current validation attempt - * It contains the node and a shape - * It may contain a predicate, path or nothing - */ -case class Attempt(node: RDFNode, - shapeRef: RefNode, - messageMap: MessageMap, - severity: Severity, - path: Option[SHACLPath] - ) { - def shapeId: RDFNode = shapeRef.id -} - +package es.weso.shacl.validator + +import es.weso.rdf.nodes._ +import es.weso.rdf.path.SHACLPath +import es.weso.shacl.{MessageMap, RefNode} +import es.weso.shacl.report.Severity + +/** Represents current validation attempt It contains the node and a shape It may contain a predicate, path or nothing + */ +case class Attempt( + node: RDFNode, + shapeRef: RefNode, + messageMap: MessageMap, + severity: Severity, + path: Option[SHACLPath] +) { + def shapeId: RDFNode = shapeRef.id +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/AttemptInfo.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/AttemptInfo.scala index aff3ce8..be44563 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/AttemptInfo.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/AttemptInfo.scala @@ -1,22 +1,18 @@ -package es.weso.shacl.validator - -import cats._ -import es.weso.rdf.nodes._ -import 
es.weso.shacl.report.Severity -import es.weso.shacl.{MessageMap, RefNode} - -case class AttemptInfo(node: RDFNode, - shape: RefNode, - messageMap: MessageMap, - severity: Severity - ) { - - override def toString = AttemptInfo.nodeShapeShow.show(this) - -} - -object AttemptInfo { - implicit val nodeShapeShow: Show[AttemptInfo] = new Show[AttemptInfo] { - def show(ns: AttemptInfo) = s"[${ns.node},${ns.shape.showId}]" - } -} +package es.weso.shacl.validator + +import cats._ +import es.weso.rdf.nodes._ +import es.weso.shacl.report.Severity +import es.weso.shacl.{MessageMap, RefNode} + +case class AttemptInfo(node: RDFNode, shape: RefNode, messageMap: MessageMap, severity: Severity) { + + override def toString = AttemptInfo.nodeShapeShow.show(this) + +} + +object AttemptInfo { + implicit val nodeShapeShow: Show[AttemptInfo] = new Show[AttemptInfo] { + def show(ns: AttemptInfo) = s"[${ns.node},${ns.shape.showId}]" + } +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/CheckResult.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/CheckResult.scala index eb9c796..6b3c85e 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/CheckResult.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/CheckResult.scala @@ -1,37 +1,35 @@ -package es.weso.shacl.validator - -import cats._ -import cats.implicits._ - - -case class CheckResult[E: Show, A: Show, Log: Show](r: (Log, Either[E, A])) { - - def result: Either[E, A] = r._2 - - def isOK: Boolean = r._2.isRight - - def errors: Seq[E] = - r._2.fold(e => List(e), _ => Seq()) - - def results: List[A] = { - r._2.fold(_ => List(), x => List(x)) - } - - def show: String = { - val result = if (isOK) { - val first = results.head - "OK. Result: " ++ "\n" ++ - Show[A].show(first) - } else - "Not OK. 
Error: " ++ "\n" ++ errors.map(e => Show[E].show(e)).mkString("\n") - val sb = new StringBuilder - sb ++= result - sb ++= "\n----------------------------log-----------------------\n" - sb ++= r._1.show - sb.toString - } - - override def toString = show - -} - +package es.weso.shacl.validator + +import cats._ +import cats.implicits._ + +case class CheckResult[E: Show, A: Show, Log: Show](r: (Log, Either[E, A])) { + + def result: Either[E, A] = r._2 + + def isOK: Boolean = r._2.isRight + + def errors: Seq[E] = + r._2.fold(e => List(e), _ => Seq()) + + def results: List[A] = { + r._2.fold(_ => List(), x => List(x)) + } + + def show: String = { + val result = if (isOK) { + val first = results.head + "OK. Result: " ++ "\n" ++ + Show[A].show(first) + } else + "Not OK. Error: " ++ "\n" ++ errors.map(e => Show[E].show(e)).mkString("\n") + val sb = new StringBuilder + sb ++= result + sb ++= "\n----------------------------log-----------------------\n" + sb ++= r._1.show + sb.toString + } + + override def toString = show + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/ComponentChecker.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/ComponentChecker.scala index bae2e47..278da4d 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/ComponentChecker.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/ComponentChecker.scala @@ -1,5 +1,3 @@ -package es.weso.shacl.validator - -object ComponentChecker { - -} \ No newline at end of file +package es.weso.shacl.validator + +object ComponentChecker {} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/Evidence.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/Evidence.scala index 2334779..155e9f4 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/Evidence.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/Evidence.scala @@ -1,26 +1,23 @@ -package es.weso.shacl.validator - -import cats._ -import es.weso.rdf.nodes.RDFNode 
-import es.weso.shacl.RefNode - -case class Evidences(ls: List[Evidence]) - -abstract class Evidence { - override def toString = Evidence.evidenceShow.show(this) -} - -case class NodeShapeEvidence(node: RDFNode, - shape: RefNode, - msg: String - ) extends Evidence -case class MsgEvidence(msg: String) extends Evidence - -object Evidence { - implicit val evidenceShow: Show[Evidence] = new Show[Evidence] { - def show(e: Evidence) = e match { - case NodeShapeEvidence(node, shape, msg) => s"$node@${shape.id}: $msg" - case MsgEvidence(msg) => msg - } - } -} \ No newline at end of file +package es.weso.shacl.validator + +import cats._ +import es.weso.rdf.nodes.RDFNode +import es.weso.shacl.RefNode + +case class Evidences(ls: List[Evidence]) + +abstract class Evidence { + override def toString = Evidence.evidenceShow.show(this) +} + +case class NodeShapeEvidence(node: RDFNode, shape: RefNode, msg: String) extends Evidence +case class MsgEvidence(msg: String) extends Evidence + +object Evidence { + implicit val evidenceShow: Show[Evidence] = new Show[Evidence] { + def show(e: Evidence) = e match { + case NodeShapeEvidence(node, shape, msg) => s"$node@${shape.id}: $msg" + case MsgEvidence(msg) => msg + } + } +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/SHACLChecker.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/SHACLChecker.scala index 49c9682..821570d 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/SHACLChecker.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/SHACLChecker.scala @@ -1,90 +1,88 @@ -package es.weso.shacl.validator - -import cats._ -import cats.effect.IO -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf._ -import es.weso.checking.CheckerCats -import es.weso.utils.internal.CollectionCompat._ -import es.weso.shacl.report.AbstractResult -import fs2.Stream -// import es.weso.utils.MyLogging - -object SHACLChecker extends CheckerCats with LazyLogging { - 
- type Config = RDFReader - type Env = ShapeTyping - type Err = AbstractResult - type Log = List[Evidence] - implicit val logMonoid: Monoid[Log] = new Monoid[Log] { - def combine(l1: Log, l2: Log): Log = l1 ++ l2 - def empty: Log = List() - } - implicit val logShow: Show[Log] = new Show[Log] { - def show(l: Log): String = l.map(_.show).mkString("\n") - } - - private def combineEnv(t1: Env, t2: Env): Env = - Monoid[Env].combine(t1,t2) - - private[validator] def combineResults(x: Result, y: Result): Result = { - val z = combineEnv(x._1,y._1) - (z, x._2 && y._2) - } - - private[validator] def checkAllWithTyping[A](cs: LazyList[A], chk: A => CheckTyping): CheckTyping = for { - t <- getTyping - r <- checkAllFlag(cs, chk, t) - } yield r - - private[validator] def done: CheckTyping = for { - t <- getTyping - } yield (t,true) - - private[validator] def fail(msg: String): CheckTyping = { - logger.debug(s"Failed: $msg") - for { - t <- getTyping - } yield (t,false) - } - - - private[validator] def combineResultSeq(ts: Seq[Result]): CheckTyping = { - val zero: (ShapeTyping,Boolean) = (ShapeTyping.empty, true) - def cmb(x: Result, y: Result): Result = { - (x._1 |+| y._1, x._2 && y._2) - } - ok(ts.foldRight(zero)(cmb)) - } - - private[validator] def combineTypings(ts: Seq[ShapeTyping]): Check[ShapeTyping] = { - ok(ShapeTyping.combineTypings(ts)) - } - - private[validator] def getRDF: Check[RDFReader] = getConfig - - private[validator] def getTyping: Check[ShapeTyping] = getEnv - - private[validator] def addLogMsg(msg: String): Check[Unit] = - addLog(List(MsgEvidence(msg))) - - - private[validator] def runLocalTyping[A](c: Check[A], f: ShapeTyping => ShapeTyping): Check[A] = - local(f)(c) - - private[validator] def checkAllTyping[A](ls: LazyList[A], chk: A => CheckTyping): CheckTyping = for { - t <- getTyping - r <- checkAllFlag(ls, chk, t) - } yield r - - private[validator] def checkSequenceTyping(ls: List[CheckTyping]): CheckTyping = for { - t <- getTyping - r <- 
checkSequenceFlag(ls,t) - } yield r - - def fromStreamIO[A](ls: Stream[IO,A]): Check[LazyList[A]] = { - fromIO(ls.compile.to(LazyList)) - } - -} +package es.weso.shacl.validator + +import cats._ +import cats.effect.IO +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf._ +import es.weso.checking.CheckerCats +import es.weso.utils.internal.CollectionCompat._ +import es.weso.shacl.report.AbstractResult +import fs2.Stream +// import es.weso.utils.MyLogging + +object SHACLChecker extends CheckerCats with LazyLogging { + + type Config = RDFReader + type Env = ShapeTyping + type Err = AbstractResult + type Log = List[Evidence] + implicit val logMonoid: Monoid[Log] = new Monoid[Log] { + def combine(l1: Log, l2: Log): Log = l1 ++ l2 + def empty: Log = List() + } + implicit val logShow: Show[Log] = new Show[Log] { + def show(l: Log): String = l.map(_.show).mkString("\n") + } + + private def combineEnv(t1: Env, t2: Env): Env = + Monoid[Env].combine(t1, t2) + + private[validator] def combineResults(x: Result, y: Result): Result = { + val z = combineEnv(x._1, y._1) + (z, x._2 && y._2) + } + + private[validator] def checkAllWithTyping[A](cs: LazyList[A], chk: A => CheckTyping): CheckTyping = for { + t <- getTyping + r <- checkAllFlag(cs, chk, t) + } yield r + + private[validator] def done: CheckTyping = for { + t <- getTyping + } yield (t, true) + + private[validator] def fail(msg: String): CheckTyping = { + logger.debug(s"Failed: $msg") + for { + t <- getTyping + } yield (t, false) + } + + private[validator] def combineResultSeq(ts: Seq[Result]): CheckTyping = { + val zero: (ShapeTyping, Boolean) = (ShapeTyping.empty, true) + def cmb(x: Result, y: Result): Result = { + (x._1 |+| y._1, x._2 && y._2) + } + ok(ts.foldRight(zero)(cmb)) + } + + private[validator] def combineTypings(ts: Seq[ShapeTyping]): Check[ShapeTyping] = { + ok(ShapeTyping.combineTypings(ts)) + } + + private[validator] def getRDF: Check[RDFReader] = getConfig + + 
private[validator] def getTyping: Check[ShapeTyping] = getEnv + + private[validator] def addLogMsg(msg: String): Check[Unit] = + addLog(List(MsgEvidence(msg))) + + private[validator] def runLocalTyping[A](c: Check[A], f: ShapeTyping => ShapeTyping): Check[A] = + local(f)(c) + + private[validator] def checkAllTyping[A](ls: LazyList[A], chk: A => CheckTyping): CheckTyping = for { + t <- getTyping + r <- checkAllFlag(ls, chk, t) + } yield r + + private[validator] def checkSequenceTyping(ls: List[CheckTyping]): CheckTyping = for { + t <- getTyping + r <- checkSequenceFlag(ls, t) + } yield r + + def fromStreamIO[A](ls: Stream[IO, A]): Check[LazyList[A]] = { + fromIO(ls.compile.to(LazyList)) + } + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/ShapeTyping.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/ShapeTyping.scala index 3d8301c..17eccfc 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/ShapeTyping.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/ShapeTyping.scala @@ -1,87 +1,89 @@ -package es.weso.shacl.validator -import cats._ -import cats.implicits._ -import es.weso.rdf.nodes.RDFNode -import es.weso.shacl.Shape -import es.weso.shacl.report._ -import es.weso.typing._ - -case class ShapeTyping(t: Typing[RDFNode, Shape, AbstractResult, String]) { - - def getNodes: Seq[RDFNode] = t.getKeys.toSeq - - def getMap : Map[RDFNode, scala.collection.Map[Shape, TypingResult[AbstractResult, String]]] = - t.getMap.toMap - - def hasType(node: RDFNode, shape: Shape): Boolean = - t.hasType(node,shape) - - def addType(node: RDFNode, shape: Shape): ShapeTyping = - ShapeTyping(t.addType(node,shape)) - - def addEvidence(node: RDFNode, shape: Shape, msg: String): ShapeTyping = - ShapeTyping(t.addEvidence(node, shape, msg)) - - def addNotEvidence(node: RDFNode, shape: Shape, e: AbstractResult): ShapeTyping = - ShapeTyping(t.addNotEvidence(node,shape,e)) - - def getFailedValues(node: RDFNode): Set[Shape] = - 
t.getFailedValues(node).toSet - - def getOkValues(node: RDFNode): Set[Shape] = - t.getOkValues(node).toSet - - def toValidationReport: ValidationReport = { - ValidationReport( - conforms = t.allOk, - results = { - val rs: Seq[(RDFNode, Shape, TypingResult[AbstractResult, String])] = - t.getMap.toSeq.map { - case (node,valueMap) => valueMap.toSeq.map { - case (shape, result) => (node, shape, result) - } - }.flatten - rs.map(_._3.getErrors.toList.flatten).flatten - }, - shapesGraphWellFormed = true - ) - } - - override def toString: String = Show[ShapeTyping].show(this) - -} - -object ShapeTyping { - def empty: ShapeTyping = ShapeTyping(Typing.empty) - - def combineTypings(ts: Seq[ShapeTyping]): ShapeTyping = - ShapeTyping(Typing.combineTypings(ts.map(_.t))) - - implicit def showShapeTyping: Show[ShapeTyping] = new Show[ShapeTyping] { - override def show(st: ShapeTyping): String = { - val sb: StringBuilder = new StringBuilder - st.getMap.toList.map{ case (node,shapeMap) => { - shapeMap.toList.map{ case (shape, typingResult) => { - sb.append(s"${node.show}-${shape.showId} = ${showTypingResult(typingResult)}\n") - }} - } - } - sb.toString - } - - private def showTypingResult(tr: TypingResult[AbstractResult, String]): String = - if (tr.isOK) "Valid" - else "Not valid" - } - - implicit def monoidShapeTyping: Monoid[ShapeTyping] = { - new Monoid[ShapeTyping] { - override def empty: ShapeTyping = ShapeTyping.empty - - override def combine(t1: ShapeTyping, t2: ShapeTyping): ShapeTyping = - ShapeTyping(Typing.combineTypings(Seq(t1.t, t2.t))) - } - } - -} - +package es.weso.shacl.validator +import cats._ +import cats.implicits._ +import es.weso.rdf.nodes.RDFNode +import es.weso.shacl.Shape +import es.weso.shacl.report._ +import es.weso.typing._ + +case class ShapeTyping(t: Typing[RDFNode, Shape, AbstractResult, String]) { + + def getNodes: Seq[RDFNode] = t.getKeys.toSeq + + def getMap: Map[RDFNode, scala.collection.Map[Shape, TypingResult[AbstractResult, String]]] = + 
t.getMap.toMap + + def hasType(node: RDFNode, shape: Shape): Boolean = + t.hasType(node, shape) + + def addType(node: RDFNode, shape: Shape): ShapeTyping = + ShapeTyping(t.addType(node, shape)) + + def addEvidence(node: RDFNode, shape: Shape, msg: String): ShapeTyping = + ShapeTyping(t.addEvidence(node, shape, msg)) + + def addNotEvidence(node: RDFNode, shape: Shape, e: AbstractResult): ShapeTyping = + ShapeTyping(t.addNotEvidence(node, shape, e)) + + def getFailedValues(node: RDFNode): Set[Shape] = + t.getFailedValues(node).toSet + + def getOkValues(node: RDFNode): Set[Shape] = + t.getOkValues(node).toSet + + def toValidationReport: ValidationReport = { + ValidationReport( + conforms = t.allOk, + results = { + val rs: Seq[(RDFNode, Shape, TypingResult[AbstractResult, String])] = + t.getMap.toSeq.map { case (node, valueMap) => + valueMap.toSeq.map { case (shape, result) => + (node, shape, result) + } + }.flatten + rs.map(_._3.getErrors.toList.flatten).flatten + }, + shapesGraphWellFormed = true + ) + } + + override def toString: String = Show[ShapeTyping].show(this) + +} + +object ShapeTyping { + def empty: ShapeTyping = ShapeTyping(Typing.empty) + + def combineTypings(ts: Seq[ShapeTyping]): ShapeTyping = + ShapeTyping(Typing.combineTypings(ts.map(_.t))) + + implicit def showShapeTyping: Show[ShapeTyping] = new Show[ShapeTyping] { + override def show(st: ShapeTyping): String = { + val sb: StringBuilder = new StringBuilder + st.getMap.toList.map { + case (node, shapeMap) => { + shapeMap.toList.map { + case (shape, typingResult) => { + sb.append(s"${node.show}-${shape.showId} = ${showTypingResult(typingResult)}\n") + } + } + } + } + sb.toString + } + + private def showTypingResult(tr: TypingResult[AbstractResult, String]): String = + if (tr.isOK) "Valid" + else "Not valid" + } + + implicit def monoidShapeTyping: Monoid[ShapeTyping] = { + new Monoid[ShapeTyping] { + override def empty: ShapeTyping = ShapeTyping.empty + + override def combine(t1: ShapeTyping, t2: 
ShapeTyping): ShapeTyping = + ShapeTyping(Typing.combineTypings(Seq(t1.t, t2.t))) + } + } + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/Validator.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/Validator.scala index 1d36b7c..4bcadf9 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/Validator.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/Validator.scala @@ -1,1058 +1,1058 @@ -package es.weso.shacl.validator - -import cats._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf._ -import es.weso.rdf.nodes._ -import es.weso.rdf.path.{PredicatePath, SHACLPath} -import es.weso.shacl._ -import es.weso.utils._ -import es.weso.shacl.showShacl._ -import SHACLChecker._ -import es.weso.rdf.operations.Comparisons -import es.weso.shacl.report.{AbstractResult, Severity, ValidationResult} -import es.weso.shacl.report.ValidationResult._ -import es.weso.rdf.operations.Comparisons._ -import es.weso.rdf.triples.RDFTriple -import es.weso.utils.internal.CollectionCompat._ -import cats.effect.IO - -/** This validator is implemented directly in Scala using the cats library - */ - -case class Validator(schema: Schema) extends LazyLogging { - - /** Return all targetNode declarations which are pairs (n,s) where

`n` = node to validate

`s` = candidate - * shape - */ - def targetNodes: Seq[(RDFNode, Shape)] = { - schema.targetNodeShapes - } - - def runCheck[A: Show](c: Check[A], rdf: RDFReader): IO[CheckResult[AbstractResult, A, Log]] = { - val initial: ShapeTyping = ShapeTyping.empty - val r = run(c)(rdf)(initial).map(CheckResult(_)) - r - } - - /** Checks if all nodes/shapes are valid in a schema Fails if any of them is not correct - */ - def checkSchemaAll: CheckTyping = { - val shapes = schema.shapes.toList.toLazyList - checkAllTyping(shapes, shapeChecker) - } - - def shapeChecker: ShapeChecker = shape => { - logger.debug(s"Checking shape: ${shape.showId}") - for { - _ <- addLogMsg(s"Checking shape ${shape.showId}") - r <- checkSequenceTyping( - List( - checkTargetNodes(shape.targetNodes)(shape), - checkTargetClasses(shape.targetClasses)(shape), - checkTargetSubjectsOf(shape.targetSubjectsOf)(shape), - checkTargetObjectsOf(shape.targetObjectsOf)(shape) - ) - ) - } yield r - } - - def checkTargetNodes(nodes: Seq[RDFNode]): ShapeChecker = shape => { - logger.debug(s"Target nodes of ${shape.showId} = ${nodes.mkString(",")}") - def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) -// val nodesShapes = nodes.map(n => nodeShape(n, shape)).toStream - for { - // rdf <- getRDF - _ <- addLogMsg(s"Checking targetNode declarations for shape ${shape.showId}. 
Nodes: ${nodes}") - r <- checkAllTyping(nodes.toLazyList, chk) - } yield { - r - } - } - - def checkTargetClasses(classes: Seq[RDFNode]): ShapeChecker = shape => { - def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) - logger.debug(s"Target classes of ${shape.showId} = ${classes.map(_.show).mkString(",")}") - for { - rdf <- getRDF - nss <- sequence(classes.map(findNodesInClass(_, rdf)).toList) - nodes = nss.flatten - r <- checkAllTyping(nodes.toLazyList, chk) - } yield r - } - - def getTriplesWithPredicate(p: IRI, rdf: RDFReader): Check[LazyList[RDFTriple]] = - fromStreamIO(rdf.triplesWithPredicate(p)) - - def checkTargetSubjectsOf(preds: Seq[IRI]): ShapeChecker = shape => { - def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) - for { - rdf <- getRDF - ts <- sequence(preds.map(getTriplesWithPredicate(_, rdf)).toList) - subjects = ts.flatten.map(_.subj) - r <- checkAllTyping(subjects.toLazyList, chk) - } yield r - } - - private def checkTargetObjectsOf(preds: Seq[IRI]): ShapeChecker = shape => { - def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) - for { - rdf <- getRDF - ts <- sequence(preds.map(getTriplesWithPredicate(_, rdf)).toList) - objects = ts.flatten.map(_.obj) - r <- checkAllTyping(objects.toLazyList, chk) - } yield r - } - - def findNodesInClass(cls: RDFNode, rdf: RDFReader): Check[LazyList[RDFNode]] = - fromStreamIO(rdf.getSHACLInstances(cls)) // .map(_.toList).leftMap(MsgError(_))) - - private def nodeShapeRef(node: RDFNode, shapeRef: RefNode, attempt: Attempt): CheckTyping = for { - rdf <- getRDF - shape <- getShapeRef(shapeRef, attempt, node) - t <- nodeShape(node, shape) - } yield t - - def nodeShape(node: RDFNode, shape: Shape): CheckTyping = shape match { - case ns: NodeShape => nodeNodeShape(node, ns) - case ps: PropertyShape => nodePropertyShape(node, ps) - } - - private def getSeverity(s: Shape): Severity = - s.severity.getOrElse(Severity.defaultSeverity) - - def nodeNodeShape(node: RDFNode, ns: NodeShape): CheckTyping = { - 
logger.debug(s"Node $node - NodeShape ${ns.showId}") - logger.debug(s"Node shape is deactivated? (${ns.deactivated})") - val attempt = Attempt(node, RefNode(ns.id), ns.message, getSeverity(ns), None) - for { - t0 <- getTyping - t <- runLocal(checkNodeShape(ns)(attempt)(node), _.addType(node, ns)) - } yield { - val r = - if (t._2) t - else - ( - t._1.addNotEvidence( - node, - ns, - shapesFailed( - node, - ns, - Set(), - attempt, - s"$node does not have nodeShape ${ns.showId} because some shapes failed." - ) - ), - false - ) - logger.debug(s"Result of node $node - NodeShape ${ns.showId})\n${showResult(r)}") - r - } - } - - def nodePropertyShape(node: RDFNode, ps: PropertyShape): CheckTyping = { - logger.debug(s"Node $node - PropertyShape ${ps.showId}") - val path = ps.path - val attempt = Attempt(node, RefNode(ps.id), ps.message, getSeverity(ps), Some(path)) - if (ps.deactivated) for { - t <- addEvidence(attempt, s"Property shape ${ps.showId} is deactivated") - } yield (t, true) - else { - val cs = ps.components - val pss = ps.propertyShapes.toList - for { - r1 <- runLocal( - checkAllWithTyping(cs.toLazyList, component2PropertyChecker(ps)(attempt, path)), - _.addType(node, ps) - ) - r2 <- runLocal( - checkAllWithTyping(pss.toLazyList, checkPropertyShapePath(path)(attempt)(node)), - _.addType(node, ps) - ) - } yield { - val r = combineResults(r1, r2) - logger.debug(s"Result of node $node - PropertyShape ${ps.showId}: ${showResult(r)}") - val finalR: Result = if (r._2) { - (r._1.addEvidence(node, ps, s"$node satisfies property shape $ps"), true) - } else { - (r._1.addNotEvidence(node, ps, shapesFailed(node, ps, Set(), attempt, "Property shape failed")), false) - } - logger.debug(s"Result of chechPropertyShape($node,${ps.showId})=${showResult(finalR)}") - finalR - - } - } - } - - private def checkNodeShape(shape: Shape): NodeChecker = attempt => - node => { - logger.debug(s"checkNodeShape($node,${shape.showId})") - if (shape.deactivated) { - logger.debug(s"Node shape is 
deactivated") - for { - t <- addEvidence(attempt, s"NodeShape ${shape.showId} is deactivated") - } yield (t, true) - } else - for { - r1 <- checkComponents(shape.components.toList)(attempt)(node) - r2 <- checkPropertyShapes(shape.propertyShapes.toList)(attempt)(node) - r = combineResults(r1, r2) - r1 <- - if (shape.closed) for { - predicates <- predicatesInPropertyConstraints(shape, attempt, node) - c <- checkClosed(shape.ignoredProperties, predicates)(attempt)(node) - } yield c - else ok(r) - } yield { - logger.debug(s"Result of checkNodeShape($node,${shape.showId})=\n${r1}") - r1 - } - } - - private def predicatesInPropertyConstraints(shape: Shape, attempt: Attempt, node: RDFNode): Check[List[IRI]] = for { - shapes <- getPropertyShapeRefs(shape.propertyShapes.toList, attempt, node) - } yield shapes.map(_.predicate).collect { case Some(iri) => iri } - - private def checkPropertyShape(attempt: Attempt)(node: RDFNode)(ps: PropertyShape): CheckTyping = { - nodePropertyShape(node, ps) - } - - private def checkPropertyShapePath(path: SHACLPath)(attempt: Attempt)(node: RDFNode)(sref: RefNode): CheckTyping = { - logger.info(s"checkPropertyShapePath $node $sref path: ${path.show}") - for { - ps <- getPropertyShapeRef(sref, attempt, node) - rdf <- getRDF - os <- fromStreamIO(rdf.objectsWithPath(node, path)) - // _ <- debug(s"checkPropertyShapePath: os=$os\nnode: $node, path=${path.show}") - shape <- getShapeRef(sref, attempt, node) - r <- checkAllWithTyping( - os.toLazyList, - (o: RDFNode) => { - val newAttempt = Attempt(o, sref, shape.message, getSeverity(shape), Some(path)) - checkPropertyShape(newAttempt)(o)(ps) - } - ) - } yield r - } - - private def checkPropertyShapes(shapeRefs: List[RefNode]): NodeChecker = attempt => - node => { - logger.debug(s"Check propertyShapes($node, ${shapeRefs.map(_.showId).mkString(",")})") - for { - pss <- getPropertyShapeRefs(shapeRefs, attempt, node) - r <- checkAllWithTyping(pss.toLazyList, checkPropertyShape(attempt)(node)) - } yield 
{ - logger.debug( - s"Result of check propertyShapes($node, ${shapeRefs.map(_.showId).mkString(",")})=${showResult(r)}" - ) - r - } - } - - private def checkComponents(cs: List[Component]): NodeChecker = attempt => - node => { - logger.debug(s"chechComponents($node,...)") - checkAllWithTyping(cs.toLazyList, (c: Component) => checkComponent(c)(attempt)(node)) - } - - private def checkComponent(c: Component): NodeChecker = attempt => - node => { - logger.debug(s"chechComponent($node,${c})") - component2Checker(c)(attempt)(node) - } - - private def component2Checker(c: Component): NodeChecker = attempt => - node => { - logger.debug(s"component2Checker($c") - c match { - case NodeComponent(s) => nodeComponentChecker(s)(attempt)(node) - case Datatype(d) => datatypeChecker(d)(attempt)(node) - case NodeKind(k) => nodeKindChecker(k)(attempt)(node) - case MinExclusive(n) => minExclusive(n)(attempt)(node) - case MaxExclusive(n) => maxExclusive(n)(attempt)(node) - case MinInclusive(n) => minInclusive(n)(attempt)(node) - case MaxInclusive(n) => maxInclusive(n)(attempt)(node) - case MinLength(n) => minLength(n)(attempt)(node) - case MaxLength(n) => maxLength(n)(attempt)(node) - case Pattern(s, flags) => pattern(s, flags)(attempt)(node) - case LanguageIn(langs) => languageIn(langs)(attempt)(node) - case Equals(p) => equals(p)(attempt)(node) - case Disjoint(p) => disjoint(p)(attempt)(node) - case LessThan(p) => lessThan(p)(attempt)(node) - case LessThanOrEquals(p) => lessThanOrEquals(p)(attempt)(node) - case Xone(shapes) => xone(shapes)(attempt)(node) - case And(shapes) => and(shapes)(attempt)(node) - case Or(shapes) => or(shapes)(attempt)(node) - case Not(shape) => not(shape)(attempt)(node) - case ClassComponent(cls) => classComponentChecker(cls)(attempt)(node) - case HasValue(v) => hasValue(v)(attempt)(node) - case In(ls) => inChecker(ls)(attempt)(node) - case _ => unsupportedNodeChecker(s"Node constraint: $c")(attempt)(node) - } - } - - private def checkValues(ls: 
List[RDFNode], p: RDFNode => CheckTyping): CheckTyping = for { - ts <- checkList(ls, p) - r <- combineResultSeq(ts) - } yield r - - /* private def propertyShape2PropertyChecker(attempt: Attempt, path: SHACLPath) - (psref: RefNode): CheckTyping = { - logger.debug(s"propertyShape2PropertyChecker. path: $path, propertyShape: $psref") - val node = attempt.node - for { - ps <- getPropertyShapeRef(psref, attempt, node) - rdf <- getRDF - os = rdf.objectsWithPath(node, path).toList - shape <- getShapeRef(psref,attempt,node) - _ <- debug(s"propertyShape2PropertyChecker: $os for $path") - check: CheckTyping = checkValues(os, o => { - val newAttempt = Attempt(o, psref, shape.message, getSeverity(shape), Some(path)) - checkPropertyShape(newAttempt)(o)(ps) - } - ) - r <- check - _ <- debug(s"Result of propertyShape2PropertyChecker\n${showResult(r)}") - } yield r - } */ - - private def component2PropertyChecker( - p: PropertyShape - )(attempt: Attempt, path: SHACLPath)(c: Component): CheckTyping = { - logger.debug(s"component2PropertyChecker. 
propertyShape: $p, path: $path, component: $c") - for { - rdf <- getRDF - node = attempt.node - os <- fromStreamIO(rdf.objectsWithPath(node, path)) - ls = os.toList - check: CheckTyping = c match { - case MinCount(n) => minCount(n, ls, attempt, path) - case MaxCount(n) => maxCount(n, ls, attempt, path) - case UniqueLang(v) => uniqueLang(v, ls, attempt, path) - case QualifiedValueShape(shape, min, max, disjoint) => - qualifiedValueShape(shape, p, min, max, disjoint, ls, attempt, path, node) - case HasValue(v) => { - logger.debug(s"HasValuePropertyChecker(v = $v, ls=$ls, node = $node, path= $path") - hasValuePropertyChecker(v, ls, attempt, node, path) - } - case Equals(p) => - for { - values <- fromStreamIO(rdf.objectsWithPath(node, PredicatePath(p))) - v <- equalsPath(ls, values.toList, p, attempt, path) - } yield v - case _ => checkValues(ls, component2Checker(c)(attempt)(_)) - } - t <- check - } yield t - } - - private def nodeComponentChecker(sref: RefNode): NodeChecker = attempt => - node => { - for { - s <- getShapeRef(sref, attempt, node) - typing <- getTyping - // shape <- getShapeRef(sref, attempt, node) - r <- - if (typing.getOkValues(node).contains(s)) done - else if (typing.getFailedValues(node).contains(s)) fail(s"getFailedValues($node) already contains $s") - else runLocal(nodeShape(node, s), _.addType(node, s)) - t <- - if (r._2) addEvidence(attempt, s"$node has shape ${s.id}") - else - for { - shape <- getShapeRef(attempt.shapeRef, attempt, node) - t1 <- addNotEvidence( - attempt, - errorNode(node, shape, attempt, s"$node does not have shape ${s.id}"), - s"$node does not have shape ${s.id}" - ) - } yield t1 - } yield { - // println(s"NodeComponentChecker: ref $sref, attempt: $attempt") - (t, r._2) - } - } - - private def classComponentChecker(cls: RDFNode): NodeChecker = attempt => - node => { - for { - rdf <- getRDF - b <- fromIO(rdf.hasSHACLClass(node, cls)) - t <- condition(b, attempt, classError(node, cls, attempt), s"$node is in class $cls") - } 
yield t - } - - private def nodeKindChecker(k: NodeKindType): NodeChecker = attempt => - node => { - logger.debug(s"nodeKindChecker($node,$k)") - k match { - case IRIKind => iriChecker(attempt)(node) - case LiteralKind => literalChecker(attempt)(node) - case BlankNodeKind => blankNodeChecker(attempt)(node) - case BlankNodeOrIRI => blankNodeOrIRIChecker(attempt)(node) - case BlankNodeOrLiteral => blankNodeOrLiteralChecker(attempt)(node) - case IRIOrLiteral => iriOrLiteralChecker(attempt)(node) - } - } - - private def datatypeChecker(d: IRI): NodeChecker = attempt => - node => - for { - rdf <- getRDF - b <- hasDatatype(rdf, node, d).handleErrorWith { err => - addNotEvidence(attempt, err, s"Error checking datatype ${d.show} for node ${node.show}") >> ok(false) - } - t <- condition(b, attempt, datatypeError(node, d, attempt), s"$node has datatype $d") - } yield t - - private def unsupportedNodeChecker(msg: String): NodeChecker = attempt => node => fail(s"Unsupported feature: $msg") - - private def iriChecker: NodeChecker = attempt => - node => { - condition(node.isIRI, attempt, iriKindError(node, attempt), s"$node is an IRI", "iri") - } - - def compare( - control: RDFNode, - comparison: (RDFNode, RDFNode) => Either[String, Boolean], - err: (RDFNode, Attempt, RDFNode) => ValidationResult, - msg: String - ): NodeChecker = attempt => - node => { - val c = comparison(control, node).getOrElse(false) - for { - t <- condition(c, attempt, err(node, attempt, control), s"$node satisfies $msg(${control})") - } yield t - } - - def compareLiterals( - n: Literal, - f: (NumericLiteral, NumericLiteral) => Boolean, - err: (RDFNode, Attempt, RDFNode) => ValidationResult, - msg: String - ): NodeChecker = attempt => - node => - for { - ctrolValue <- checkNumeric(n, attempt) - value <- checkNumeric(node, attempt) - t <- condition(f(ctrolValue, value), attempt, err(node, attempt, n), s"$node satisfies $msg(${n})") - } yield t - - private def minExclusive(n: Literal): NodeChecker = - 
compare(n, lessThanNodes, minExclusiveError, "minExclusive")

  private def minInclusive(n: Literal): NodeChecker =
    compare(n, lessThanOrEqualsNodes, minInclusiveError, "minInclusive")

  private def maxExclusive(n: Literal): NodeChecker =
    compare(n, greaterThanNodes, maxExclusiveError, "maxExclusive")

  private def maxInclusive(n: Literal): NodeChecker =
    compare(n, greaterThanOrEqualsNodes, maxInclusiveError, "maxInclusive")

  /** sh:minLength: the node's lexical form must have at least `n` characters.
    * Blank nodes never satisfy minLength.
    */
  private def minLength(n: Int): NodeChecker = attempt =>
    node => {
      val longEnough = !node.isBNode && node.getLexicalForm.length >= n
      condition(longEnough, attempt, minLengthError(node, attempt, n), s"$node satisfies minLength($n)")
    }

  /** sh:maxLength: the node's lexical form must have at most `n` characters.
    * Blank nodes never satisfy maxLength.
    */
  private def maxLength(n: Int): NodeChecker = attempt =>
    node => {
      val shortEnough = !node.isBNode && node.getLexicalForm.length <= n
      condition(shortEnough, attempt, maxLengthError(node, attempt, n), s"$node satisfies maxLength($n)")
    }

  /** sh:pattern: the lexical form must match regex `p` (with optional `flags`).
    * Blank nodes never satisfy pattern.
    */
  private def pattern(p: String, flags: Option[String]): NodeChecker = attempt =>
    node =>
      for {
        matched <- regexMatch(p, flags, node.getLexicalForm, node, attempt)
        t <- condition(
          !node.isBNode && matched,
          attempt,
          patternError(node, attempt, p, flags),
          s"$node satisfies pattern ~/$p/${flags.getOrElse("")}"
        )
      } yield t

  // Lifts the Either returned by the regex engine into the Check monad.
  private def regexMatch(
      p: String,
      flags: Option[String],
      str: String,
      node: RDFNode,
      attempt: Attempt
  ): Check[Boolean] =
    RegEx(p, flags).matches(str).fold(msg => err(regexError(node, attempt, msg)), b => ok(b))

  /** sh:uniqueLang: when `b` is true, no language tag may appear twice among `os`. */
  private def uniqueLang(b: Boolean, os: Seq[RDFNode], attempt: Attempt, path: SHACLPath): CheckTyping =
    if (!b) done
    else {
      val node = attempt.node
      condition(
        checkUniqueLang(os),
        attempt,
        uniqueLangError(node, attempt, path, os),
        s"Checked uniqueLang(true) for path $path on node $node"
      )
    }

  // Only language-tagged literals contribute a tag; a duplicated tag fails the check.
  private def checkUniqueLang(os: Seq[RDFNode]): Boolean = {
    val langs = os.collect { case LangLiteral(_, l) => l.lang }
    langs.distinct.size == langs.size
  }

  /** sh:languageIn: the node must be a language-tagged literal whose tag is in `langs`. */
  private def languageIn(langs: List[String]): NodeChecker = attempt =>
    node =>
      condition(
        checkLangIn(node, langs),
        attempt,
        languageInError(node, attempt, langs),
        s"$node satisfies languageIn(${langs.mkString(",")})"
      )

  private def checkLangIn(node: RDFNode, langs: List[String]): Boolean = node match {
    case LangLiteral(_, l) => langs.contains(l.lang)
    case _                 => false
  }

  // Pairwise orderings delegated to the RDF node comparison operations.
  private def lessThanOrEqualsNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] =
    for {
      lt <- n1.lessThan(n2)
      eq <- n1.isEqualTo(n2)
    } yield lt || eq

  private def lessThanNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] =
    n1.lessThan(n2)

  private def greaterThanNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] =
    n2.lessThan(n1)

  private def greaterThanOrEqualsNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] =
    for {
      gt <- n2.lessThan(n1)
      eq <- n2.isEqualTo(n1)
    } yield gt || eq

  // Property-pair constraint components (sh:equals, sh:disjoint, sh:lessThan, sh:lessThanOrEquals).
  def equals(p: IRI): NodeChecker =
    comparison(p, "equals", equalsError, equalsNode)
  def disjoint(p: IRI): NodeChecker =
    comparison(p, "disjoint", disjointError, disjointNode)
  def lessThan(p: IRI): NodeChecker =
    comparison(p, "lessThan", lessThanError, lessThanNode)
  def lessThanOrEquals(p: IRI): NodeChecker =
    comparison(p, "lessThanOrEquals", lessThanOrEqualsError, lessThanOrEqualNode)

  // TODO: Maybe add a check to see if the nodes are comparable
  // With current definition, if nodes are not comparable, always returns false without raising any error...
- private def comparison( - p: IRI, - name: String, - errorMaker: (RDFNode, Attempt, IRI, Set[RDFNode]) => ValidationResult, - cond: (RDFNode, RDFNode) => Boolean - ): NodeChecker = - attempt => - node => { - logger.debug(s"Comparison on node $node") - for { - rdf <- getRDF - subject = attempt.node - vs <- fromStreamIO(rdf.triplesWithSubjectPredicate(subject, p)) - os = vs.map(_.obj) - t <- { - logger.debug(s"Values: $vs") - if (os.isEmpty) for { - t1 <- addNotEvidence( - attempt, - errorMaker(node, attempt, p, os.toSet), - s"No values for node $subject with predicate $p" - ) - } yield (t1, false) - else - condition( - os.forall(cond(node, _)), - attempt, - errorMaker(node, attempt, p, os.toSet), - s"$node satisfies $name $p with values ${os})" - ) - } - } yield t - } - - private def and(srefs: Seq[RefNode]): NodeChecker = attempt => - node => { - for { - shapes <- getShapeRefs(srefs.toList, attempt, node) - r <- checkAllWithTyping(shapes.toLazyList, (s: Shape) => nodeShape(node, s)) - } yield r - } - - /* private def checkAnd(node: RDFNode, shapes: List[Shape], t: ShapeTyping) : Boolean = { - t.getFailedValues(node).isEmpty - } */ - - private def xone(sRefs: Seq[RefNode]): NodeChecker = attempt => - node => { - for { - t <- getTyping - // shapes <- getShapeRefs(sRefs.toList, attempt, node) - r <- checkSomeFlagCount(sRefs.toLazyList, (s: RefNode) => nodeShapeRef(node, s, attempt), t) - count = r._2 - t1 <- condition(count == 1, attempt, xoneError(node, attempt, sRefs), s"$node satisfies exactly one of $sRefs") - } yield t1 - } - - def checkXoneType(node: RDFNode, shapes: List[Shape], t: ShapeTyping): Boolean = { - shapes.map(t.hasType(node, _)).count(_ == true) == 1 - } - - private def qualifiedValueShape( - shape: RefNode, - p: PropertyShape, - min: Option[Int], - max: Option[Int], - maybeDisjoint: Option[Boolean], - values: Seq[RDFNode], - attempt: Attempt, - path: SHACLPath, - node: RDFNode - ): CheckTyping = { - // println(s"qualifiedValueShape: $shape, $p, 
$min, $max")
    val disjoint = maybeDisjoint.getOrElse(false)
    for {
      s      <- getShapeRef(shape, attempt, node)
      typing <- getTyping
      // When sh:qualifiedValueShapesDisjoint is true, drop values that conform to a sibling shape.
      vs <- if (disjoint) filterConformSiblings(values, p, attempt) else ok(values)
      t <-
        if (typing.getOkValues(node).contains(s)) done
        else if (typing.getFailedValues(node).contains(s))
          fail(s"getFailedValues($node) already contains $s")
        else
          for {
            r <- checkSomeFlagCount(vs.toLazyList, (n: RDFNode) => nodeShapeRef(n, shape, attempt), typing)
            value = r._2
            t <- condition(
              between(value, min, max),
              attempt,
              qualifiedShapeError(attempt.node, attempt, value, min, max),
              s"qualifiedValueShape value = ${value}, min=${min.map(_.toString).getOrElse("-")}, max=${max.map(_.toString).getOrElse("-")}"
            )
          } yield t
    } yield {
      logger.debug(s"qualifiedValueShape(attempt: ${attempt},${shape.showId}): t=\n${showResult(t)}")
      t
    }
  }

  // Sibling shapes are the other qualified value shapes declared under the same parent.
  private def filterConformSiblings(values: Seq[RDFNode], p: PropertyShape, attempt: Attempt): Check[Seq[RDFNode]] =
    filterConformShapes(values, schema.siblingQualifiedShapes(RefNode(p.id)), attempt)

  /** Keeps only the values that conform to NONE of `shapes`. */
  private def filterConformShapes(values: Seq[RDFNode], shapes: Seq[RefNode], attempt: Attempt): Check[Seq[RDFNode]] = {
    logger.debug(s"FilterConformShapes(values=$values, shapes=$shapes)")
    for {
      checked <- sequence(values.toList.map(value => conformsNodeShapes(value, shapes, attempt)))
      rs = checked.collect { case (n, false) => n }
    } yield {
      logger.debug(s"Result of FilterConformShapes($values,$shapes,$attempt) = $rs")
      rs.toSeq
    }
  }

  // Pairs the node with whether it conforms to at least one of the given shapes.
  private def conformsNodeShapes(node: RDFNode, shapes: Seq[RefNode], attempt: Attempt): Check[(RDFNode, Boolean)] =
    checkLs(shapes.toList.map(nodeShapeRef(node, _, attempt))).map(ls => (node, ls.nonEmpty))

  /** True iff `v` lies within the optional inclusive bounds. */
  def between(v: Int, maybeMin: Option[Int], maybeMax: Option[Int]): Boolean =
    maybeMin.forall(v >= _) && maybeMax.forall(v <= _)

  /** sh:or: the node must satisfy at least one of the referenced shapes. */
  private def or(sRefs: Seq[RefNode]): NodeChecker = attempt =>
    node => {
      val onAllFailed: CheckTyping = fail(s"None of the components of or pass")
      checkSomeFlag(sRefs.toLazyList, (sref: RefNode) => nodeShapeRef(node, sref, attempt), onAllFailed)
    }

  /** sh:not: the node must NOT satisfy the referenced shape. */
  private def not(sref: RefNode): NodeChecker = attempt =>
    node =>
      for {
        shape  <- getShapeRef(sref, attempt, node)
        typing <- getTyping
        t <- {
          logger.debug(s"\nTesting not nodeShape($node,${shape.showId}) with typing\n${typing}")
          nodeShape(node, shape)
        }
        t1 <- {
          logger.debug(s"\nnot($sref). Value of nodeShape($node,${shape.showId})=\n$t")
          condition(
            !t._1.hasType(node, shape),
            attempt,
            notShapeError(node, sref, attempt),
            s"$node does not have shape $sref"
          )
        }
      } yield t1

  // Raises notNumeric when the node has no numeric value.
  private def checkNumeric(node: RDFNode, attempt: Attempt): Check[NumericLiteral] =
    numericValue(node) match {
      case Left(_)      => err(notNumeric(node, attempt))
      case Right(value) => ok(value)
    }

  // sh:nodeKind checkers: each one just tests the syntactic category of the node.
  private def literalChecker: NodeChecker = attempt =>
    node => condition(node.isLiteral, attempt, literalKindError(node, attempt), s"$node is a Literal")

  private def blankNodeChecker: NodeChecker = attempt =>
    node => condition(node.isBNode, attempt, bNodeKindError(node, attempt), s"$node is a Blank Node")

  private def blankNodeOrIRIChecker: NodeChecker = attempt =>
    node =>
      condition(
        node.isBNode || node.isIRI,
        attempt,
        bNodeOrIRIKindError(node, attempt),
        s"$node is a Blank Node or an IRI"
      )

  private def blankNodeOrLiteralChecker: NodeChecker = attempt =>
    node =>
      condition(
        node.isBNode || node.isLiteral,
        attempt,
        bNodeOrLiteralKindError(node, attempt),
        s"$node is a Blank Node or Literal"
      )

  private
def iriOrLiteralChecker: NodeChecker = attempt =>
    node =>
      condition(
        node.isIRI || node.isLiteral,
        attempt,
        iriOrLiteralKindError(node, attempt),
        s"$node is an IRI or Literal" // fixed grammar: "a IRI"
      )

  /** sh:hasValue over a property path: there must be exactly one value and it must match `v`.
    * Zero values or more than one value both fail with a specific error.
    */
  private def hasValuePropertyChecker(
      v: Value,
      os: List[RDFNode],
      attempt: Attempt,
      node: RDFNode,
      path: SHACLPath
  ): CheckTyping = for {
    t <- getTyping
    newT <- os.size match {
      case 0 =>
        for {
          t1 <- addNotEvidence(
            attempt,
            hasValueErrorNoValue(node, attempt, v, path),
            s"HasValue($v) failed. $node has no value" // fixed message: "has not value"
          )
        } yield (t1, false)
      case 1 => hasValue(v)(attempt)(os.head)
      case n =>
        for {
          t1 <- addNotEvidence(
            attempt,
            hasValueErrorMoreThanOne(node, attempt, v, path, n),
            s"HasValue($v) failed. $node has more than one value ($n)" // fixed garbled message: "has more $n values"
          )
        } yield (t1, false)
    }
  } yield newT

  /** sh:hasValue on a single node: the node must match `value`. */
  private def hasValue(value: Value): NodeChecker = attempt =>
    currentNode => {
      condition(
        isValue(currentNode, value),
        attempt,
        hasValueError(currentNode, attempt, value),
        s"Checked $currentNode sh:hasValue $value"
      )
    }

  /** sh:in: the node must match one of `values`. */
  private def inChecker(values: Seq[Value]): NodeChecker = attempt =>
    currentNode => {
      condition(
        inValues(currentNode, values),
        attempt,
        inError(currentNode, attempt, values),
        s"Checked $currentNode sh:in $values"
      )
    }

  /** sh:minCount: the path must have at least `minCount` values at this node. */
  private def minCount(minCount: Int, os: Seq[RDFNode], attempt: Attempt, path: SHACLPath): CheckTyping = {
    logger.debug(s"minCount $minCount, os: $os, attempt: $attempt, path: $path")
    val count = os.size
    val node  = attempt.node
    condition(
      count >= minCount,
      attempt,
      minCountError(node, attempt, minCount, count), // was os.size; use the computed count, consistent with maxCount
      s"Checked minCount($minCount) for path($path) on node $node"
    )
  }

  /** sh:maxCount: the path must have at most `maxCount` values at this node. */
  private def maxCount(maxCount: Int, os: Seq[RDFNode], attempt: Attempt, path: SHACLPath): CheckTyping = {
    val count = os.size
    val node  = attempt.node
    condition(
      count <= maxCount,
      attempt,
      maxCountError(node, attempt, maxCount, count),
      s"Checked maxCount($maxCount) for path($path) on node $node"
    )
  }

  /** sh:equals over a path: the value sets reached through `path` and through `equalsIri`
    * must be identical. `Comparisons.different` returns the offending nodes (empty = equal).
    */
  private def equalsPath(
      os: List[RDFNode],
      values: List[RDFNode],
      equalsIri: IRI,
      attempt: Attempt,
      path: SHACLPath
  ): CheckTyping = {
    logger.debug(s"equalsPath $equalsIri, os: $os, values: $values, attempt: $attempt, path: $path")
    Comparisons.different(os, values) match { // os and values are already Lists; dropped redundant .toList
      case Left(msg) =>
        for {
          t <- addNotEvidence(
            attempt,
            equalsError(attempt.node, attempt, equalsIri, Set()),
            s"node ${attempt.node} fails equals condition. Error: $msg"
          )
        } yield (t, false)
      case Right(List()) =>
        for {
          t <- addEvidence(
            attempt,
            s"equals(${equalsIri.show}). nodes ${os.show} pass equals condition with values ${values.show}" // fixed unbalanced paren
          )
        } yield (t, true)
      case Right(ls) =>
        // Every differing node gets its own negative evidence.
        checkAllWithTyping(
          ls.toLazyList,
          (n: RDFNode) =>
            for {
              t <- addNotEvidence(
                attempt,
                equalsError(n, attempt, equalsIri, Set()),
                s"node $n fails equals condition. "
              )
            } yield (t, false)
        )
    }
  }

  /** sh:closed: every predicate leaving `node` must be either an allowed property
    * (declared by a property constraint) or explicitly ignored.
    */
  private def checkClosed(ignoredProperties: List[IRI], allowedProperties: List[IRI]): NodeChecker = attempt =>
    node => {
      logger.debug(s"checkClosed(ignored=$ignoredProperties, allowed=$allowedProperties)") // added missing ')'
      for {
        rdf        <- getRDF
        neighbours <- fromStreamIO(rdf.triplesWithSubject(node))
        predicates = neighbours.map(_.pred).toList
        notAllowed = predicates.diff(ignoredProperties).diff(allowedProperties)
        t <- condition(
          notAllowed.isEmpty,
          attempt,
          closedError(node, attempt, allowedProperties, ignoredProperties, notAllowed),
          s"Passes closed condition with predicates $predicates and ignoredProperties $ignoredProperties"
        )
      } yield t
    }

  private def getShapeRefs(sRefs: List[RefNode], attempt: Attempt, node: RDFNode): Check[List[Shape]] =
    sequence(sRefs.map(getShapeRef(_, attempt, node)))

  private def getPropertyShapeRefs(srefs: List[RefNode], attempt: Attempt, node: RDFNode): Check[List[PropertyShape]] =
    sequence(srefs.map(getPropertyShapeRef(_, attempt, node)))

  private def
getPropertyShapeRef(sref: RefNode, attempt: Attempt, node: RDFNode): Check[PropertyShape] =
    getShapeRef(sref, attempt, node).flatMap(shape2PropertyShape(_, attempt, node))

  // Narrows a Shape to a PropertyShape, raising a validation error otherwise.
  private def shape2PropertyShape(shape: Shape, attempt: Attempt, node: RDFNode): Check[PropertyShape] =
    shape match {
      case ps: PropertyShape => ok(ps)
      case _                 => err(expectedPropertyShape(node, attempt, s"Expected shape $shape to be a property shape"))
    }

  /** Registers a positive evidence `msg` for the attempt's node/shape and returns the updated typing. */
  private def addEvidence(attempt: Attempt, msg: String): Check[ShapeTyping] =
    for {
      typing <- getTyping
      shape  <- getShapeRef(attempt.shapeRef, attempt, attempt.node)
      _      <- addLog(List(NodeShapeEvidence(attempt.node, attempt.shapeRef, msg)))
    } yield typing.addEvidence(attempt.node, shape, msg)

  /** Registers a negative evidence (validation result `e`) for the attempt's node/shape. */
  private def addNotEvidence(attempt: Attempt, e: AbstractResult, msg: String): Check[ShapeTyping] = {
    val node = attempt.node
    val sref = attempt.shapeRef
    for {
      typing <- getTyping
      shape  <- getShapeRef(sref, attempt, node)
      _      <- addLog(List(NodeShapeEvidence(node, sref, msg)))
    } yield typing.addNotEvidence(node, shape, e)
  }

  // Resolves a shape reference against the schema, failing when it is dangling.
  private def getShapeRef(sref: RefNode, attempt: Attempt, node: RDFNode): Check[Shape] =
    schema.shapesMap.get(sref) match {
      case Some(shape) => ok(shape)
      case None =>
        err(
          notFoundShapeRef(
            node,
            attempt,
            s"Shape ${sref.showId} not found in schema. Available srefs: ${schema.shapesMap.keys.map(_.showId).mkString(",")}"
          )
        )
    }

  /** Validates all shapes of the schema against `rdf`. */
  def validateAll(rdf: RDFReader): IO[CheckResult[AbstractResult, (ShapeTyping, Boolean), Log]] =
    runCheck(checkSchemaAll, rdf)

  def showResult(t: (ShapeTyping, Boolean)): String = t.show

  ////////////////////////////////////////////

  /** if condition is true adds an evidence, otherwise, adds a not typing with the Violation error as evidence
    * @param condition
    *   condition to check
    * @param attempt
    *   current validation attempt that is being tried
    * @param error
    *   error to raise in case `condition` is false
    * @param evidence
    *   evidence to add to `attempt` in case `condition` is true
    * @param conditionName
    *   optional label used only for debug logging
    */
  private[validator] def condition(
      condition: Boolean,
      attempt: Attempt,
      error: AbstractResult,
      evidence: String,
      conditionName: String = ""
  ): CheckTyping = {
    logger.debug(s"condition($conditionName,...)")
    for {
      _ <- getTyping
      r <- condFlag(
        validateCheck(condition, error),
        (_: Unit) => addEvidence(attempt, evidence),
        err => addNotEvidence(attempt, err, "Condition failed")
      )
    } yield {
      logger.debug(s"result of condition: $r")
      r
    }
  }

  // TODO: Refactor the following code...
- // move to SRDF and check SPARQL compatibility - // SPARQL comparison opetators: https://www.w3.org/TR/sparql11-query/#OperatorMapping - private def equalsNode(n1: RDFNode, n2: RDFNode): Boolean = (n1, n2) match { - case (l1: Literal, l2: Literal) => l1 == l2 - case (i1: IRI, i2: IRI) => i1 == i2 - case (b1: BNode, b2: BNode) => b1 == b2 - case (_, _) => false - } - - private def disjointNode(n1: RDFNode, n2: RDFNode): Boolean = n1 != n2 - private def lessThanNode(n1: RDFNode, n2: RDFNode): Boolean = (n1, n2) match { - case (IntegerLiteral(n1, _), IntegerLiteral(n2, _)) => n1 < n2 - case (DecimalLiteral(n1, _), DecimalLiteral(n2, _)) => n1 < n2 - case (DoubleLiteral(n1, _), DoubleLiteral(n2, _)) => n1 < n2 - case (StringLiteral(n1), StringLiteral(n2)) => n1 < n2 - case (DatatypeLiteral(n1, d1), DatatypeLiteral(n2, d2)) => d1 == d2 && n1 < n2 - case (LangLiteral(n1, l1), LangLiteral(n2, l2)) => n1 < n2 - case (i1: IRI, i2: IRI) => i1.str < i2.str - case (b1: BNode, b2: BNode) => b1.id < b2.id - case (_, _) => false - } - private def lessThanOrEqualNode(n1: RDFNode, n2: RDFNode): Boolean = (n1, n2) match { - case (IntegerLiteral(n1, _), IntegerLiteral(n2, _)) => n1 <= n2 - case (DecimalLiteral(n1, _), DecimalLiteral(n2, _)) => n1 <= n2 - case (DoubleLiteral(n1, _), DoubleLiteral(n2, _)) => n1 <= n2 - case (StringLiteral(n1), StringLiteral(n2)) => n1 <= n2 - case (DatatypeLiteral(n1, d1), DatatypeLiteral(n2, d2)) => d1 == d2 && n1 <= n2 - case (LangLiteral(n1, l1), LangLiteral(n2, l2)) => n1 <= n2 - case (i1: IRI, i2: IRI) => i1.str <= i2.str - case (b1: BNode, b2: BNode) => b1.id <= b2.id - case (_, _) => false - } - - /** Checks that `node` is one of `values` - */ - private def inValues(node: RDFNode, values: Seq[Value]): Boolean = { - values.exists(_.matchNode(node)) - } - - private def isValue(node: RDFNode, value: Value): Boolean = { - value.matchNode(node) - } - - private def hasDatatype(rdf: RDFReader, node: RDFNode, d: IRI): Check[Boolean] = { - for { - 
eitherBoolean <- fromIO(rdf.checkDatatype(node, d).attempt) - b <- eitherBoolean.fold(_ => ok(false), ok(_)) - } yield b - } - - private[validator] def debug(msg: String): Check[Unit] = { - logger.debug(msg) - ok(()) - } - -} - -object Validator { - def empty = Validator(schema = Schema.empty) - - def validate(schema: Schema, rdf: RDFReader): IO[Either[AbstractResult, (ShapeTyping, Boolean)]] = { - Validator(schema).validateAll(rdf).map(_.result) - } - -} +package es.weso.shacl.validator + +import cats._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf._ +import es.weso.rdf.nodes._ +import es.weso.rdf.path.{PredicatePath, SHACLPath} +import es.weso.shacl._ +import es.weso.utils._ +import es.weso.shacl.showShacl._ +import SHACLChecker._ +import es.weso.rdf.operations.Comparisons +import es.weso.shacl.report.{AbstractResult, Severity, ValidationResult} +import es.weso.shacl.report.ValidationResult._ +import es.weso.rdf.operations.Comparisons._ +import es.weso.rdf.triples.RDFTriple +import es.weso.utils.internal.CollectionCompat._ +import cats.effect.IO + +/** This validator is implemented directly in Scala using the cats library + */ + +case class Validator(schema: Schema) extends LazyLogging { + + /** Return all targetNode declarations which are pairs (n,s) where

`n` = the node to validate and

`s` = candidate + * shape + */ + def targetNodes: Seq[(RDFNode, Shape)] = { + schema.targetNodeShapes + } + + def runCheck[A: Show](c: Check[A], rdf: RDFReader): IO[CheckResult[AbstractResult, A, Log]] = { + val initial: ShapeTyping = ShapeTyping.empty + val r = run(c)(rdf)(initial).map(CheckResult(_)) + r + } + + /** Checks if all nodes/shapes are valid in a schema Fails if any of them is not correct + */ + def checkSchemaAll: CheckTyping = { + val shapes = schema.shapes.toList.toLazyList + checkAllTyping(shapes, shapeChecker) + } + + def shapeChecker: ShapeChecker = shape => { + logger.debug(s"Checking shape: ${shape.showId}") + for { + _ <- addLogMsg(s"Checking shape ${shape.showId}") + r <- checkSequenceTyping( + List( + checkTargetNodes(shape.targetNodes)(shape), + checkTargetClasses(shape.targetClasses)(shape), + checkTargetSubjectsOf(shape.targetSubjectsOf)(shape), + checkTargetObjectsOf(shape.targetObjectsOf)(shape) + ) + ) + } yield r + } + + def checkTargetNodes(nodes: Seq[RDFNode]): ShapeChecker = shape => { + logger.debug(s"Target nodes of ${shape.showId} = ${nodes.mkString(",")}") + def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) +// val nodesShapes = nodes.map(n => nodeShape(n, shape)).toStream + for { + // rdf <- getRDF + _ <- addLogMsg(s"Checking targetNode declarations for shape ${shape.showId}. 
Nodes: ${nodes}") + r <- checkAllTyping(nodes.toLazyList, chk) + } yield { + r + } + } + + def checkTargetClasses(classes: Seq[RDFNode]): ShapeChecker = shape => { + def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) + logger.debug(s"Target classes of ${shape.showId} = ${classes.map(_.show).mkString(",")}") + for { + rdf <- getRDF + nss <- sequence(classes.map(findNodesInClass(_, rdf)).toList) + nodes = nss.flatten + r <- checkAllTyping(nodes.toLazyList, chk) + } yield r + } + + def getTriplesWithPredicate(p: IRI, rdf: RDFReader): Check[LazyList[RDFTriple]] = + fromStreamIO(rdf.triplesWithPredicate(p)) + + def checkTargetSubjectsOf(preds: Seq[IRI]): ShapeChecker = shape => { + def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) + for { + rdf <- getRDF + ts <- sequence(preds.map(getTriplesWithPredicate(_, rdf)).toList) + subjects = ts.flatten.map(_.subj) + r <- checkAllTyping(subjects.toLazyList, chk) + } yield r + } + + private def checkTargetObjectsOf(preds: Seq[IRI]): ShapeChecker = shape => { + def chk(n: RDFNode): CheckTyping = nodeShape(n, shape) + for { + rdf <- getRDF + ts <- sequence(preds.map(getTriplesWithPredicate(_, rdf)).toList) + objects = ts.flatten.map(_.obj) + r <- checkAllTyping(objects.toLazyList, chk) + } yield r + } + + def findNodesInClass(cls: RDFNode, rdf: RDFReader): Check[LazyList[RDFNode]] = + fromStreamIO(rdf.getSHACLInstances(cls)) // .map(_.toList).leftMap(MsgError(_))) + + private def nodeShapeRef(node: RDFNode, shapeRef: RefNode, attempt: Attempt): CheckTyping = for { + rdf <- getRDF + shape <- getShapeRef(shapeRef, attempt, node) + t <- nodeShape(node, shape) + } yield t + + def nodeShape(node: RDFNode, shape: Shape): CheckTyping = shape match { + case ns: NodeShape => nodeNodeShape(node, ns) + case ps: PropertyShape => nodePropertyShape(node, ps) + } + + private def getSeverity(s: Shape): Severity = + s.severity.getOrElse(Severity.defaultSeverity) + + def nodeNodeShape(node: RDFNode, ns: NodeShape): CheckTyping = { + 
logger.debug(s"Node $node - NodeShape ${ns.showId}") + logger.debug(s"Node shape is deactivated? (${ns.deactivated})") + val attempt = Attempt(node, RefNode(ns.id), ns.message, getSeverity(ns), None) + for { + t0 <- getTyping + t <- runLocal(checkNodeShape(ns)(attempt)(node), _.addType(node, ns)) + } yield { + val r = + if (t._2) t + else + ( + t._1.addNotEvidence( + node, + ns, + shapesFailed( + node, + ns, + Set(), + attempt, + s"$node does not have nodeShape ${ns.showId} because some shapes failed." + ) + ), + false + ) + logger.debug(s"Result of node $node - NodeShape ${ns.showId})\n${showResult(r)}") + r + } + } + + def nodePropertyShape(node: RDFNode, ps: PropertyShape): CheckTyping = { + logger.debug(s"Node $node - PropertyShape ${ps.showId}") + val path = ps.path + val attempt = Attempt(node, RefNode(ps.id), ps.message, getSeverity(ps), Some(path)) + if (ps.deactivated) for { + t <- addEvidence(attempt, s"Property shape ${ps.showId} is deactivated") + } yield (t, true) + else { + val cs = ps.components + val pss = ps.propertyShapes.toList + for { + r1 <- runLocal( + checkAllWithTyping(cs.toLazyList, component2PropertyChecker(ps)(attempt, path)), + _.addType(node, ps) + ) + r2 <- runLocal( + checkAllWithTyping(pss.toLazyList, checkPropertyShapePath(path)(attempt)(node)), + _.addType(node, ps) + ) + } yield { + val r = combineResults(r1, r2) + logger.debug(s"Result of node $node - PropertyShape ${ps.showId}: ${showResult(r)}") + val finalR: Result = if (r._2) { + (r._1.addEvidence(node, ps, s"$node satisfies property shape $ps"), true) + } else { + (r._1.addNotEvidence(node, ps, shapesFailed(node, ps, Set(), attempt, "Property shape failed")), false) + } + logger.debug(s"Result of chechPropertyShape($node,${ps.showId})=${showResult(finalR)}") + finalR + + } + } + } + + private def checkNodeShape(shape: Shape): NodeChecker = attempt => + node => { + logger.debug(s"checkNodeShape($node,${shape.showId})") + if (shape.deactivated) { + logger.debug(s"Node shape is 
deactivated") + for { + t <- addEvidence(attempt, s"NodeShape ${shape.showId} is deactivated") + } yield (t, true) + } else + for { + r1 <- checkComponents(shape.components.toList)(attempt)(node) + r2 <- checkPropertyShapes(shape.propertyShapes.toList)(attempt)(node) + r = combineResults(r1, r2) + r1 <- + if (shape.closed) for { + predicates <- predicatesInPropertyConstraints(shape, attempt, node) + c <- checkClosed(shape.ignoredProperties, predicates)(attempt)(node) + } yield c + else ok(r) + } yield { + logger.debug(s"Result of checkNodeShape($node,${shape.showId})=\n${r1}") + r1 + } + } + + private def predicatesInPropertyConstraints(shape: Shape, attempt: Attempt, node: RDFNode): Check[List[IRI]] = for { + shapes <- getPropertyShapeRefs(shape.propertyShapes.toList, attempt, node) + } yield shapes.map(_.predicate).collect { case Some(iri) => iri } + + private def checkPropertyShape(attempt: Attempt)(node: RDFNode)(ps: PropertyShape): CheckTyping = { + nodePropertyShape(node, ps) + } + + private def checkPropertyShapePath(path: SHACLPath)(attempt: Attempt)(node: RDFNode)(sref: RefNode): CheckTyping = { + logger.info(s"checkPropertyShapePath $node $sref path: ${path.show}") + for { + ps <- getPropertyShapeRef(sref, attempt, node) + rdf <- getRDF + os <- fromStreamIO(rdf.objectsWithPath(node, path)) + // _ <- debug(s"checkPropertyShapePath: os=$os\nnode: $node, path=${path.show}") + shape <- getShapeRef(sref, attempt, node) + r <- checkAllWithTyping( + os.toLazyList, + (o: RDFNode) => { + val newAttempt = Attempt(o, sref, shape.message, getSeverity(shape), Some(path)) + checkPropertyShape(newAttempt)(o)(ps) + } + ) + } yield r + } + + private def checkPropertyShapes(shapeRefs: List[RefNode]): NodeChecker = attempt => + node => { + logger.debug(s"Check propertyShapes($node, ${shapeRefs.map(_.showId).mkString(",")})") + for { + pss <- getPropertyShapeRefs(shapeRefs, attempt, node) + r <- checkAllWithTyping(pss.toLazyList, checkPropertyShape(attempt)(node)) + } yield 
{
      logger.debug(
        s"Result of check propertyShapes($node, ${shapeRefs.map(_.showId).mkString(",")})=${showResult(r)}"
      )
      r
    }
  }

  /** Checks every constraint component of `cs` on the node, combining the typings. */
  private def checkComponents(cs: List[Component]): NodeChecker = attempt =>
    node => {
      logger.debug(s"checkComponents($node,...)") // fixed typo "chechComponents"
      checkAllWithTyping(cs.toLazyList, (c: Component) => checkComponent(c)(attempt)(node))
    }

  private def checkComponent(c: Component): NodeChecker = attempt =>
    node => {
      logger.debug(s"checkComponent($node,${c})") // fixed typo "chechComponent"
      component2Checker(c)(attempt)(node)
    }

  /** Dispatches a node-oriented constraint component to its checker.
    * Path/cardinality components (sh:minCount, sh:maxCount, sh:uniqueLang, qualified value
    * shapes, ...) are handled in component2PropertyChecker and fall through to
    * `unsupportedNodeChecker` here.
    */
  private def component2Checker(c: Component): NodeChecker = attempt =>
    node => {
      logger.debug(s"component2Checker($c)") // fixed unbalanced paren in debug message
      c match {
        case NodeComponent(s)    => nodeComponentChecker(s)(attempt)(node)
        case Datatype(d)         => datatypeChecker(d)(attempt)(node)
        case NodeKind(k)         => nodeKindChecker(k)(attempt)(node)
        case MinExclusive(n)     => minExclusive(n)(attempt)(node)
        case MaxExclusive(n)     => maxExclusive(n)(attempt)(node)
        case MinInclusive(n)     => minInclusive(n)(attempt)(node)
        case MaxInclusive(n)     => maxInclusive(n)(attempt)(node)
        case MinLength(n)        => minLength(n)(attempt)(node)
        case MaxLength(n)        => maxLength(n)(attempt)(node)
        case Pattern(s, flags)   => pattern(s, flags)(attempt)(node)
        case LanguageIn(langs)   => languageIn(langs)(attempt)(node)
        case Equals(p)           => equals(p)(attempt)(node)
        case Disjoint(p)         => disjoint(p)(attempt)(node)
        case LessThan(p)         => lessThan(p)(attempt)(node)
        case LessThanOrEquals(p) => lessThanOrEquals(p)(attempt)(node)
        case Xone(shapes)        => xone(shapes)(attempt)(node)
        case And(shapes)         => and(shapes)(attempt)(node)
        case Or(shapes)          => or(shapes)(attempt)(node)
        case Not(shape)          => not(shape)(attempt)(node)
        case ClassComponent(cls) => classComponentChecker(cls)(attempt)(node)
        case HasValue(v)         => hasValue(v)(attempt)(node)
        case In(ls)              => inChecker(ls)(attempt)(node)
        case _                   => unsupportedNodeChecker(s"Node constraint: $c")(attempt)(node)
      }
    }

  private def checkValues(ls:
List[RDFNode], p: RDFNode => CheckTyping): CheckTyping = for { + ts <- checkList(ls, p) + r <- combineResultSeq(ts) + } yield r + + /* private def propertyShape2PropertyChecker(attempt: Attempt, path: SHACLPath) + (psref: RefNode): CheckTyping = { + logger.debug(s"propertyShape2PropertyChecker. path: $path, propertyShape: $psref") + val node = attempt.node + for { + ps <- getPropertyShapeRef(psref, attempt, node) + rdf <- getRDF + os = rdf.objectsWithPath(node, path).toList + shape <- getShapeRef(psref,attempt,node) + _ <- debug(s"propertyShape2PropertyChecker: $os for $path") + check: CheckTyping = checkValues(os, o => { + val newAttempt = Attempt(o, psref, shape.message, getSeverity(shape), Some(path)) + checkPropertyShape(newAttempt)(o)(ps) + } + ) + r <- check + _ <- debug(s"Result of propertyShape2PropertyChecker\n${showResult(r)}") + } yield r + } */ + + private def component2PropertyChecker( + p: PropertyShape + )(attempt: Attempt, path: SHACLPath)(c: Component): CheckTyping = { + logger.debug(s"component2PropertyChecker. 
propertyShape: $p, path: $path, component: $c") + for { + rdf <- getRDF + node = attempt.node + os <- fromStreamIO(rdf.objectsWithPath(node, path)) + ls = os.toList + check: CheckTyping = c match { + case MinCount(n) => minCount(n, ls, attempt, path) + case MaxCount(n) => maxCount(n, ls, attempt, path) + case UniqueLang(v) => uniqueLang(v, ls, attempt, path) + case QualifiedValueShape(shape, min, max, disjoint) => + qualifiedValueShape(shape, p, min, max, disjoint, ls, attempt, path, node) + case HasValue(v) => { + logger.debug(s"HasValuePropertyChecker(v = $v, ls=$ls, node = $node, path= $path") + hasValuePropertyChecker(v, ls, attempt, node, path) + } + case Equals(p) => + for { + values <- fromStreamIO(rdf.objectsWithPath(node, PredicatePath(p))) + v <- equalsPath(ls, values.toList, p, attempt, path) + } yield v + case _ => checkValues(ls, component2Checker(c)(attempt)(_)) + } + t <- check + } yield t + } + + private def nodeComponentChecker(sref: RefNode): NodeChecker = attempt => + node => { + for { + s <- getShapeRef(sref, attempt, node) + typing <- getTyping + // shape <- getShapeRef(sref, attempt, node) + r <- + if (typing.getOkValues(node).contains(s)) done + else if (typing.getFailedValues(node).contains(s)) fail(s"getFailedValues($node) already contains $s") + else runLocal(nodeShape(node, s), _.addType(node, s)) + t <- + if (r._2) addEvidence(attempt, s"$node has shape ${s.id}") + else + for { + shape <- getShapeRef(attempt.shapeRef, attempt, node) + t1 <- addNotEvidence( + attempt, + errorNode(node, shape, attempt, s"$node does not have shape ${s.id}"), + s"$node does not have shape ${s.id}" + ) + } yield t1 + } yield { + // println(s"NodeComponentChecker: ref $sref, attempt: $attempt") + (t, r._2) + } + } + + private def classComponentChecker(cls: RDFNode): NodeChecker = attempt => + node => { + for { + rdf <- getRDF + b <- fromIO(rdf.hasSHACLClass(node, cls)) + t <- condition(b, attempt, classError(node, cls, attempt), s"$node is in class $cls") + } 
yield t + } + + private def nodeKindChecker(k: NodeKindType): NodeChecker = attempt => + node => { + logger.debug(s"nodeKindChecker($node,$k)") + k match { + case IRIKind => iriChecker(attempt)(node) + case LiteralKind => literalChecker(attempt)(node) + case BlankNodeKind => blankNodeChecker(attempt)(node) + case BlankNodeOrIRI => blankNodeOrIRIChecker(attempt)(node) + case BlankNodeOrLiteral => blankNodeOrLiteralChecker(attempt)(node) + case IRIOrLiteral => iriOrLiteralChecker(attempt)(node) + } + } + + private def datatypeChecker(d: IRI): NodeChecker = attempt => + node => + for { + rdf <- getRDF + b <- hasDatatype(rdf, node, d).handleErrorWith { err => + addNotEvidence(attempt, err, s"Error checking datatype ${d.show} for node ${node.show}") >> ok(false) + } + t <- condition(b, attempt, datatypeError(node, d, attempt), s"$node has datatype $d") + } yield t + + private def unsupportedNodeChecker(msg: String): NodeChecker = attempt => node => fail(s"Unsupported feature: $msg") + + private def iriChecker: NodeChecker = attempt => + node => { + condition(node.isIRI, attempt, iriKindError(node, attempt), s"$node is an IRI", "iri") + } + + def compare( + control: RDFNode, + comparison: (RDFNode, RDFNode) => Either[String, Boolean], + err: (RDFNode, Attempt, RDFNode) => ValidationResult, + msg: String + ): NodeChecker = attempt => + node => { + val c = comparison(control, node).getOrElse(false) + for { + t <- condition(c, attempt, err(node, attempt, control), s"$node satisfies $msg(${control})") + } yield t + } + + def compareLiterals( + n: Literal, + f: (NumericLiteral, NumericLiteral) => Boolean, + err: (RDFNode, Attempt, RDFNode) => ValidationResult, + msg: String + ): NodeChecker = attempt => + node => + for { + ctrolValue <- checkNumeric(n, attempt) + value <- checkNumeric(node, attempt) + t <- condition(f(ctrolValue, value), attempt, err(node, attempt, n), s"$node satisfies $msg(${n})") + } yield t + + private def minExclusive(n: Literal): NodeChecker = + 
compare(n, lessThanNodes, minExclusiveError, "minExclusive") + + private def minInclusive(n: Literal): NodeChecker = { + compare(n, lessThanOrEqualsNodes, minInclusiveError, "minInclusive") + } + + private def maxExclusive(n: Literal): NodeChecker = + compare(n, greaterThanNodes, maxExclusiveError, "maxExclusive") + + private def maxInclusive(n: Literal): NodeChecker = + compare(n, greaterThanOrEqualsNodes, maxInclusiveError, "maxInclusive") + + private def minLength(n: Int): NodeChecker = attempt => + node => + condition( + !node.isBNode && node.getLexicalForm.length >= n, + attempt, + minLengthError(node, attempt, n), + s"$node satisfies minLength($n)" + ) + + private def maxLength(n: Int): NodeChecker = attempt => + node => + condition( + !node.isBNode && node.getLexicalForm.length <= n, + attempt, + maxLengthError(node, attempt, n), + s"$node satisfies maxLength($n)" + ) + + private def pattern(p: String, flags: Option[String]): NodeChecker = attempt => + node => + for { + b <- regexMatch(p, flags, node.getLexicalForm, node, attempt) + t <- condition( + !node.isBNode && b, + attempt, + patternError(node, attempt, p, flags), + s"$node satisfies pattern ~/$p/${flags.getOrElse("")}" + ) + } yield t + + private def regexMatch( + p: String, + flags: Option[String], + str: String, + node: RDFNode, + attempt: Attempt + ): Check[Boolean] = + RegEx(p, flags).matches(str) match { + case Left(msg) => err(regexError(node, attempt, msg)) + case Right(b) => ok(b) + } + + private def uniqueLang(b: Boolean, os: Seq[RDFNode], attempt: Attempt, path: SHACLPath): CheckTyping = if (b) { + val node = attempt.node + for { + t <- condition( + checkUniqueLang(os), + attempt, + uniqueLangError(node, attempt, path, os), + s"Checked uniqueLang(true) for path $path on node $node" + ) + } yield t + } else done + + private def checkUniqueLang(os: Seq[RDFNode]): Boolean = { + def getLanguageTag(n: RDFNode): Option[String] = { + n match { + case LangLiteral(_, l) => Some(l.lang) + case _ => 
None + } + } + val langs: Seq[String] = os.map(getLanguageTag).flatten + + // If there are duplicated langs, the following condition fails + langs.distinct.size == langs.size + } + + private def languageIn(langs: List[String]): NodeChecker = attempt => + node => + for { + t <- condition( + checkLangIn(node, langs), + attempt, + languageInError(node, attempt, langs), + s"$node satisfies languageIn(${langs.mkString(",")})" + ) + } yield t + + private def checkLangIn(node: RDFNode, langs: List[String]): Boolean = { + node match { + case LangLiteral(_, l) => langs.contains(l.lang) + case _ => false + } + } + + private def lessThanOrEqualsNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] = + for { + c1 <- n1.lessThan(n2) + c2 <- n1.isEqualTo(n2) + } yield c1 || c2 + + private def lessThanNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] = + n1.lessThan(n2) + + private def greaterThanNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] = + n2.lessThan(n1) + + private def greaterThanOrEqualsNodes(n1: RDFNode, n2: RDFNode): Either[String, Boolean] = + for { + c1 <- n2.lessThan(n1) + c2 <- n2.isEqualTo(n1) + } yield c1 || c2 + + def equals(p: IRI): NodeChecker = + comparison(p, "equals", equalsError, equalsNode) + def disjoint(p: IRI): NodeChecker = + comparison(p, "disjoint", disjointError, disjointNode) + def lessThan(p: IRI): NodeChecker = + comparison(p, "lessThan", lessThanError, lessThanNode) + def lessThanOrEquals(p: IRI): NodeChecker = + comparison(p, "lessThanOrEquals", lessThanOrEqualsError, lessThanOrEqualNode) + + // TODO: Maybe add a check to see if the nodes are comparable + // With current definition, if nodes are not comparable, always returns false without raising any error... 
+ private def comparison( + p: IRI, + name: String, + errorMaker: (RDFNode, Attempt, IRI, Set[RDFNode]) => ValidationResult, + cond: (RDFNode, RDFNode) => Boolean + ): NodeChecker = + attempt => + node => { + logger.debug(s"Comparison on node $node") + for { + rdf <- getRDF + subject = attempt.node + vs <- fromStreamIO(rdf.triplesWithSubjectPredicate(subject, p)) + os = vs.map(_.obj) + t <- { + logger.debug(s"Values: $vs") + if (os.isEmpty) for { + t1 <- addNotEvidence( + attempt, + errorMaker(node, attempt, p, os.toSet), + s"No values for node $subject with predicate $p" + ) + } yield (t1, false) + else + condition( + os.forall(cond(node, _)), + attempt, + errorMaker(node, attempt, p, os.toSet), + s"$node satisfies $name $p with values ${os})" + ) + } + } yield t + } + + private def and(srefs: Seq[RefNode]): NodeChecker = attempt => + node => { + for { + shapes <- getShapeRefs(srefs.toList, attempt, node) + r <- checkAllWithTyping(shapes.toLazyList, (s: Shape) => nodeShape(node, s)) + } yield r + } + + /* private def checkAnd(node: RDFNode, shapes: List[Shape], t: ShapeTyping) : Boolean = { + t.getFailedValues(node).isEmpty + } */ + + private def xone(sRefs: Seq[RefNode]): NodeChecker = attempt => + node => { + for { + t <- getTyping + // shapes <- getShapeRefs(sRefs.toList, attempt, node) + r <- checkSomeFlagCount(sRefs.toLazyList, (s: RefNode) => nodeShapeRef(node, s, attempt), t) + count = r._2 + t1 <- condition(count == 1, attempt, xoneError(node, attempt, sRefs), s"$node satisfies exactly one of $sRefs") + } yield t1 + } + + def checkXoneType(node: RDFNode, shapes: List[Shape], t: ShapeTyping): Boolean = { + shapes.map(t.hasType(node, _)).count(_ == true) == 1 + } + + private def qualifiedValueShape( + shape: RefNode, + p: PropertyShape, + min: Option[Int], + max: Option[Int], + maybeDisjoint: Option[Boolean], + values: Seq[RDFNode], + attempt: Attempt, + path: SHACLPath, + node: RDFNode + ): CheckTyping = { + // println(s"qualifiedValueShape: $shape, $p, 
$min, $max") + val disjoint = maybeDisjoint.getOrElse(false) + for { + s <- getShapeRef(shape, attempt, node) + // _ <- IO(println(s"checking qualifiedValueShape: $s, $shape, $p, $min, $max, $disjoint")) + typing <- getTyping + vs <- + if (disjoint) filterConformSiblings(values, p, attempt) + else ok(values) + t <- + if (typing.getOkValues(node).contains(s)) done + else if (typing.getFailedValues(node).contains(s)) fail(s"getFailedValues($node) already contains $s") + else + for { + r <- checkSomeFlagCount(vs.toLazyList, (n: RDFNode) => nodeShapeRef(n, shape, attempt), typing) + value = r._2 + t <- condition( + between(value, min, max), + attempt, + qualifiedShapeError(attempt.node, attempt, value, min, max), + s"qualifiedValueShape value = ${value}, min=${min.map(_.toString).getOrElse("-")}, max=${max.map(_.toString).getOrElse("-")}" + ) + } yield t + } yield { + logger.debug(s"qualifiedValueShape(attempt: ${attempt},${shape.showId}): t=\n${showResult(t)}") + t + } + } + + private def filterConformSiblings(values: Seq[RDFNode], p: PropertyShape, attempt: Attempt): Check[Seq[RDFNode]] = { + val shapes = schema.siblingQualifiedShapes(RefNode(p.id)) + filterConformShapes(values, shapes, attempt) + } + + private def filterConformShapes(values: Seq[RDFNode], shapes: Seq[RefNode], attempt: Attempt): Check[Seq[RDFNode]] = { + logger.debug(s"FilterConformShapes(values=$values, shapes=$shapes)") + def checkValuesShapes: Check[List[(RDFNode, Boolean)]] = { + sequence(values.toList.map(value => conformsNodeShapes(value, shapes, attempt))) + } + for { + cs <- checkValuesShapes + rs = cs.collect { case (n, false) => n } + } yield { + logger.debug(s"Result of FilterConformShapes($values,$shapes,$attempt) = $rs") + rs.toSeq + } + } + + private def conformsNodeShapes(node: RDFNode, shapes: Seq[RefNode], attempt: Attempt): Check[(RDFNode, Boolean)] = + for { + ls <- checkLs(shapes.toList.map(nodeShapeRef(node, _, attempt))) + } yield (node, !ls.isEmpty) + + def between(v: Int, 
maybeMin: Option[Int], maybeMax: Option[Int]): Boolean = (maybeMin, maybeMax) match { + case (None, None) => true + case (Some(min), None) => v >= min + case (None, Some(max)) => v <= max + case (Some(min), Some(max)) => v >= min && v <= max + } + + private def or(sRefs: Seq[RefNode]): NodeChecker = attempt => + node => { + val last: CheckTyping = fail(s"None of the components of or pass") + def fn(sref: RefNode): CheckTyping = nodeShapeRef(node, sref, attempt) + checkSomeFlag(sRefs.toLazyList, fn, last) + } + + private def not(sref: RefNode): NodeChecker = attempt => + node => { + for { + shape <- getShapeRef(sref, attempt, node) + typing <- getTyping + t <- { + logger.debug(s"\nTesting not nodeShape($node,${shape.showId}) with typing\n${typing}") + nodeShape(node, shape) + } + t1 <- { + logger.debug(s"\nnot($sref). Value of nodeShape($node,${shape.showId})=\n$t") + condition( + !t._1.hasType(node, shape), + attempt, + notShapeError(node, sref, attempt), + s"$node does not have shape $sref" + ) + } + } yield t1 + } + + private def checkNumeric(node: RDFNode, attempt: Attempt): Check[NumericLiteral] = + numericValue(node).fold(e => err(notNumeric(node, attempt)), value => ok(value)) + + private def literalChecker: NodeChecker = attempt => + node => { + condition(node.isLiteral, attempt, literalKindError(node, attempt), s"$node is a Literal") + } + + private def blankNodeChecker: NodeChecker = nodeShape => + node => { + condition(node.isBNode, nodeShape, bNodeKindError(node, nodeShape), s"$node is a Blank Node") + } + + private def blankNodeOrIRIChecker: NodeChecker = nodeShape => + node => { + condition( + node.isBNode || node.isIRI, + nodeShape, + bNodeOrIRIKindError(node, nodeShape), + s"$node is a Blank Node or an IRI" + ) + } + + private def blankNodeOrLiteralChecker: NodeChecker = attempt => + node => { + condition( + node.isBNode || node.isLiteral, + attempt, + bNodeOrLiteralKindError(node, attempt), + s"$node is a Blank Node or Literal" + ) + } + + private 
def iriOrLiteralChecker: NodeChecker = attempt => + node => { + condition( + node.isIRI || node.isLiteral, + attempt, + iriOrLiteralKindError(node, attempt), + s"$node is a IRI or Literal" + ) + } + + private def hasValuePropertyChecker( + v: Value, + os: List[RDFNode], + attempt: Attempt, + node: RDFNode, + path: SHACLPath + ): CheckTyping = for { + t <- getTyping + newT <- os.size match { + case 0 => + for { + t1 <- addNotEvidence( + attempt, + hasValueErrorNoValue(node, attempt, v, path), + s"HasValue($v) failed. $node has not value" + ) + } yield (t1, false) + case 1 => hasValue(v)(attempt)(os.head) + case n => + for { + t1 <- addNotEvidence( + attempt, + hasValueErrorMoreThanOne(node, attempt, v, path, n), + s"HasValue($v) failed. $node has more $n values" + ) + } yield (t1, false) + } + } yield newT + + private def hasValue(value: Value): NodeChecker = attempt => + currentNode => { + condition( + isValue(currentNode, value), + attempt, + hasValueError(currentNode, attempt, value), + s"Checked $currentNode sh:hasValue $value" + ) + } + + private def inChecker(values: Seq[Value]): NodeChecker = attempt => + currentNode => { + condition( + inValues(currentNode, values), + attempt, + inError(currentNode, attempt, values), + s"Checked $currentNode sh:in $values" + ) + } + + private def minCount(minCount: Int, os: Seq[RDFNode], attempt: Attempt, path: SHACLPath): CheckTyping = { + logger.debug(s"minCount $minCount, os: $os, attempt: $attempt, path: $path") + val count = os.size + val node = attempt.node + condition( + count >= minCount, + attempt, + minCountError(node, attempt, minCount, os.size), + s"Checked minCount($minCount) for path($path) on node $node" + ) + } + + private def maxCount(maxCount: Int, os: Seq[RDFNode], attempt: Attempt, path: SHACLPath): CheckTyping = { + val count = os.size + val node = attempt.node + condition( + count <= maxCount, + attempt, + maxCountError(node, attempt, maxCount, count), + s"Checked maxCount($maxCount) for path($path) on 
node $node" + ) + } + + private def equalsPath( + os: List[RDFNode], + values: List[RDFNode], + equalsIri: IRI, + attempt: Attempt, + path: SHACLPath + ): CheckTyping = { + logger.debug(s"equalsPath $equalsIri, os: $os, values: $values, attempt: $attempt, path: $path") + Comparisons.different(os.toList, values.toList) match { + case Left(msg) => + for { + t <- addNotEvidence( + attempt, + equalsError(attempt.node, attempt, equalsIri, Set()), + s"node ${attempt.node} fails equals condition. Error: $msg" + ) + } yield (t, false) + case Right(List()) => + for { + t <- addEvidence( + attempt, + s"equals(${equalsIri.show}. nodes ${os.show} pass equals condition with values ${values.show}" + ) + } yield (t, true) + + case Right(ls) => + checkAllWithTyping( + ls.toLazyList, + (n: RDFNode) => + for { + t <- addNotEvidence( + attempt, + equalsError(n, attempt, equalsIri, Set()), + s"node $n fails equals condition. " + ) + } yield (t, false) + ) + } + } + + private def checkClosed(ignoredProperties: List[IRI], allowedProperties: List[IRI]): NodeChecker = attempt => + node => { + logger.debug(s"checkClosed(ignored=$ignoredProperties, allowed=$allowedProperties") + for { + rdf <- getRDF + neighbours <- fromStreamIO(rdf.triplesWithSubject(node)) + predicates = neighbours.map(_.pred).toList + notAllowed = predicates.diff(ignoredProperties).diff(allowedProperties) + t <- { + condition( + notAllowed.isEmpty, + attempt, + closedError(node, attempt, allowedProperties, ignoredProperties, notAllowed), + s"Passes closed condition with predicates $predicates and ignoredProperties $ignoredProperties" + ) + } + } yield t + } + + private def getShapeRefs(sRefs: List[RefNode], attempt: Attempt, node: RDFNode): Check[List[Shape]] = + sequence(sRefs.map(getShapeRef(_, attempt, node))) + + private def getPropertyShapeRefs(srefs: List[RefNode], attempt: Attempt, node: RDFNode): Check[List[PropertyShape]] = + sequence(srefs.map(getPropertyShapeRef(_, attempt, node))) + + private def 
getPropertyShapeRef(sref: RefNode, attempt: Attempt, node: RDFNode): Check[PropertyShape] = for { + shape <- getShapeRef(sref, attempt, node) + ps <- shape2PropertyShape(shape, attempt, node) + } yield ps + + private def shape2PropertyShape(shape: Shape, attempt: Attempt, node: RDFNode): Check[PropertyShape] = shape match { + case ps: PropertyShape => ok(ps) + case _ => err(expectedPropertyShape(node, attempt, s"Expected shape $shape to be a property shape")) + } + + private def addEvidence(attempt: Attempt, msg: String): Check[ShapeTyping] = { + for { + t <- getTyping + shape <- getShapeRef(attempt.shapeRef, attempt, attempt.node) + _ <- addLog(List(NodeShapeEvidence(attempt.node, attempt.shapeRef, msg))) + } yield t.addEvidence(attempt.node, shape, msg) + } + + private def addNotEvidence(attempt: Attempt, e: AbstractResult, msg: String): Check[ShapeTyping] = { + val node = attempt.node + val sref = attempt.shapeRef + for { + t <- getTyping + shape <- getShapeRef(sref, attempt, node) + _ <- addLog(List(NodeShapeEvidence(attempt.node, sref, msg))) + } yield { + t.addNotEvidence(node, shape, e) + } + } + + private def getShapeRef(sref: RefNode, attempt: Attempt, node: RDFNode): Check[Shape] = + schema.shapesMap.get(sref) match { + case Some(shape) => ok(shape) + case None => + err( + notFoundShapeRef( + node, + attempt, + s"Shape ${sref.showId} not found in schema. 
Available srefs: ${schema.shapesMap.keys.map(_.showId).mkString(",")}" + ) + ) + } + + def validateAll(rdf: RDFReader): IO[CheckResult[AbstractResult, (ShapeTyping, Boolean), Log]] = { + runCheck(checkSchemaAll, rdf) + } + + def showResult(t: (ShapeTyping, Boolean)): String = + t.show + + //////////////////////////////////////////// + + /** if condition is true adds an evidence, otherwise, adds a not typing with the Violation error as evidence + * @param condition + * condition to check + * @param attempt + * current validation attempt that is being tried + * @param error + * error to raise in case `condition` is false + * @param evidence + * evidence to add to `attempt` in case `condition` is true + */ + private[validator] def condition( + condition: Boolean, + attempt: Attempt, + error: AbstractResult, + evidence: String, + conditionName: String = "" + ): CheckTyping = { + logger.debug(s"condition($conditionName,...)") + for { + t <- getTyping + r <- condFlag( + validateCheck(condition, error), + (_: Unit) => addEvidence(attempt, evidence), + err => addNotEvidence(attempt, err, "Condition failed") + ) + } yield { + logger.debug(s"result of condition: $r") + r + } + } + + // TODO: Refactor the following code... 
+ // move to SRDF and check SPARQL compatibility + // SPARQL comparison opetators: https://www.w3.org/TR/sparql11-query/#OperatorMapping + private def equalsNode(n1: RDFNode, n2: RDFNode): Boolean = (n1, n2) match { + case (l1: Literal, l2: Literal) => l1 == l2 + case (i1: IRI, i2: IRI) => i1 == i2 + case (b1: BNode, b2: BNode) => b1 == b2 + case (_, _) => false + } + + private def disjointNode(n1: RDFNode, n2: RDFNode): Boolean = n1 != n2 + private def lessThanNode(n1: RDFNode, n2: RDFNode): Boolean = (n1, n2) match { + case (IntegerLiteral(n1, _), IntegerLiteral(n2, _)) => n1 < n2 + case (DecimalLiteral(n1, _), DecimalLiteral(n2, _)) => n1 < n2 + case (DoubleLiteral(n1, _), DoubleLiteral(n2, _)) => n1 < n2 + case (StringLiteral(n1), StringLiteral(n2)) => n1 < n2 + case (DatatypeLiteral(n1, d1), DatatypeLiteral(n2, d2)) => d1 == d2 && n1 < n2 + case (LangLiteral(n1, l1), LangLiteral(n2, l2)) => n1 < n2 + case (i1: IRI, i2: IRI) => i1.str < i2.str + case (b1: BNode, b2: BNode) => b1.id < b2.id + case (_, _) => false + } + private def lessThanOrEqualNode(n1: RDFNode, n2: RDFNode): Boolean = (n1, n2) match { + case (IntegerLiteral(n1, _), IntegerLiteral(n2, _)) => n1 <= n2 + case (DecimalLiteral(n1, _), DecimalLiteral(n2, _)) => n1 <= n2 + case (DoubleLiteral(n1, _), DoubleLiteral(n2, _)) => n1 <= n2 + case (StringLiteral(n1), StringLiteral(n2)) => n1 <= n2 + case (DatatypeLiteral(n1, d1), DatatypeLiteral(n2, d2)) => d1 == d2 && n1 <= n2 + case (LangLiteral(n1, l1), LangLiteral(n2, l2)) => n1 <= n2 + case (i1: IRI, i2: IRI) => i1.str <= i2.str + case (b1: BNode, b2: BNode) => b1.id <= b2.id + case (_, _) => false + } + + /** Checks that `node` is one of `values` + */ + private def inValues(node: RDFNode, values: Seq[Value]): Boolean = { + values.exists(_.matchNode(node)) + } + + private def isValue(node: RDFNode, value: Value): Boolean = { + value.matchNode(node) + } + + private def hasDatatype(rdf: RDFReader, node: RDFNode, d: IRI): Check[Boolean] = { + for { + 
eitherBoolean <- fromIO(rdf.checkDatatype(node, d).attempt) + b <- eitherBoolean.fold(_ => ok(false), ok(_)) + } yield b + } + + private[validator] def debug(msg: String): Check[Unit] = { + logger.debug(msg) + ok(()) + } + +} + +object Validator { + def empty = Validator(schema = Schema.empty) + + def validate(schema: Schema, rdf: RDFReader): IO[Either[AbstractResult, (ShapeTyping, Boolean)]] = { + Validator(schema).validateAll(rdf).map(_.result) + } + +} diff --git a/modules/shacl/src/main/scala/es/weso/shacl/validator/package.scala b/modules/shacl/src/main/scala/es/weso/shacl/validator/package.scala index 653be5f..10159f8 100644 --- a/modules/shacl/src/main/scala/es/weso/shacl/validator/package.scala +++ b/modules/shacl/src/main/scala/es/weso/shacl/validator/package.scala @@ -1,15 +1,15 @@ -package es.weso.shacl - -import es.weso.rdf.nodes.RDFNode -import es.weso.rdf.path.SHACLPath -import es.weso.shacl.validator.SHACLChecker.Check - -package object validator { - type Result = (ShapeTyping, Boolean) - type CheckTyping = Check[Result] - type PropertyChecker = (Attempt, SHACLPath) => CheckTyping - type NodeChecker = Attempt => RDFNode => CheckTyping - type ShapeChecker = Shape => CheckTyping - type NodeShapeChecker = (RDFNode, Shape) => CheckTyping - -} +package es.weso.shacl + +import es.weso.rdf.nodes.RDFNode +import es.weso.rdf.path.SHACLPath +import es.weso.shacl.validator.SHACLChecker.Check + +package object validator { + type Result = (ShapeTyping, Boolean) + type CheckTyping = Check[Result] + type PropertyChecker = (Attempt, SHACLPath) => CheckTyping + type NodeChecker = Attempt => RDFNode => CheckTyping + type ShapeChecker = Shape => CheckTyping + type NodeShapeChecker = (RDFNode, Shape) => CheckTyping + +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/AbstractSyntaxTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/AbstractSyntaxTest.scala index 79951dc..4fec851 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/AbstractSyntaxTest.scala 
+++ b/modules/shacl/src/test/scala/es/weso/shacl/AbstractSyntaxTest.scala @@ -1,32 +1,32 @@ -package es.weso.shacl - -import munit._ -import es.weso.rdf.nodes._ - -class AbstractSyntaxTest extends FunSuite { - - test("should be able to create a shape") { - val x = BNode("x") - val id = IRI("http://example.org/s") - val shape = NodeShape( - id = id, - components = List(), - targets = List(), - propertyShapes = List(RefNode(x)), - closed = false, - List(), - deactivated = false, - MessageMap.empty, - None, - name = MessageMap.empty, - description = MessageMap.empty, - order = None, - group = None, - sourceIRI = None - ) - - assertEquals(shape.id, id) - - } - -} +package es.weso.shacl + +import munit._ +import es.weso.rdf.nodes._ + +class AbstractSyntaxTest extends FunSuite { + + test("should be able to create a shape") { + val x = BNode("x") + val id = IRI("http://example.org/s") + val shape = NodeShape( + id = id, + components = List(), + targets = List(), + propertyShapes = List(RefNode(x)), + closed = false, + List(), + deactivated = false, + MessageMap.empty, + None, + name = MessageMap.empty, + description = MessageMap.empty, + order = None, + group = None, + sourceIRI = None + ) + + assertEquals(shape.id, id) + + } + +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/DeactivatedTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/DeactivatedTest.scala index 9d6418c..5d3d78f 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/DeactivatedTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/DeactivatedTest.scala @@ -1,40 +1,46 @@ -package es.weso.shacl - -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.shacl.converter.RDF2Shacl -import es.weso.shacl.validator.Validator -import munit._ - -class DeactivatedTest extends CatsEffectSuite { - - test("checks a deactivated shape") { - val str = - s"""|prefix : - |prefix sh: - |prefix xsd: - |prefix rdfs: - |:PersonShape a sh:NodeShape ; - | sh:targetNode :alice; - | sh:targetNode :bob ; - | 
sh:property :HasName ; - | sh:property :HasAge . - |:NotPerson a sh:NodeShape ; - | sh:not :PersonShape . - |:HasName a sh:PropertyShape ; sh:path :name ; sh:minCount 1 ; sh:deactivated true . - |:HasAge a sh:PropertyShape ; sh:path :age ; sh:minCount 1 . - | - |:alice a :Person; :age 35 . - |:bob a :Person ; :age 23; :name "Robert" . - |:carol a :Person . - |:NotPerson sh:targetNode :carol . - | """.stripMargin - - val r = RDFAsJenaModel.fromString(str, "TURTLE", None).flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - result <- Validator.validate(schema, rdf) - } yield result)) - - r.attempt.map(v => assertEquals(v.isRight,true)) - - } -} +package es.weso.shacl + +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.shacl.converter.RDF2Shacl +import es.weso.shacl.validator.Validator +import munit._ + +class DeactivatedTest extends CatsEffectSuite { + + test("checks a deactivated shape") { + val str = + s"""|prefix : + |prefix sh: + |prefix xsd: + |prefix rdfs: + |:PersonShape a sh:NodeShape ; + | sh:targetNode :alice; + | sh:targetNode :bob ; + | sh:property :HasName ; + | sh:property :HasAge . + |:NotPerson a sh:NodeShape ; + | sh:not :PersonShape . + |:HasName a sh:PropertyShape ; sh:path :name ; sh:minCount 1 ; sh:deactivated true . + |:HasAge a sh:PropertyShape ; sh:path :age ; sh:minCount 1 . + | + |:alice a :Person; :age 35 . + |:bob a :Person ; :age 23; :name "Robert" . + |:carol a :Person . + |:NotPerson sh:targetNode :carol . 
+ | """.stripMargin + + val r = RDFAsJenaModel + .fromString(str, "TURTLE", None) + .flatMap( + _.use(rdf => + for { + schema <- RDF2Shacl.getShacl(rdf) + result <- Validator.validate(schema, rdf) + } yield result + ) + ) + + r.attempt.map(v => assertEquals(v.isRight, true)) + + } +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/ImportTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/ImportTest.scala index e6d532b..965aa28 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/ImportTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/ImportTest.scala @@ -1,62 +1,66 @@ -package es.weso.shacl - -import java.nio.file.Paths -import com.typesafe.config.{Config, ConfigFactory} -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes._ -import es.weso.shacl.converter.RDF2Shacl -import es.weso.shacl.validator.Validator -import cats.effect._ -import munit._ - -class ImportTest extends CatsEffectSuite { - - val conf: Config = ConfigFactory.load() - val shaclFolderStr = conf.getString("shaclTests") - val shaclFolder = IRI(Paths.get(shaclFolderStr).normalize.toUri.toString) + "imports/" - - test("import") { - val r = - RDFAsJenaModel.fromIRI(iri = shaclFolder + "import.ttl", format = "TURTLE", base = Some(shaclFolder)).flatMap(_.use( - rdf => for { - //_ <- { println(s"RDF: ${rdf.serialize("TURTLE").getOrElse("")}"); Right(()) } - // extendedRdf <- rdf.extendImports() - // _ <- { println(s"Extended RDF: ${extendedRdf.serialize("TURTLE").getOrElse("")}"); Right(()) } - schema <- RDF2Shacl.getShacl(rdf) - //_ <- { println(s"----\nSchema: ${schema.serialize("TURTLE", None,RDFAsJenaModel.empty)}"); Right(()) } - eitherResult <- Validator.validate(schema, rdf) - result <- eitherResult.fold(s => IO.raiseError(new RuntimeException(s"Error validating: $s")),IO.pure(_)) - } yield result)) - - r.map(pair => { - val (typing, ok) = pair - val alice = IRI("http://example.org/alice") - val bob = IRI("http://example.org/bob") - val person = 
IRI("http://example.org/Person") - val hasName = IRI("http://example.org/hasName") - assertEquals(typing.getFailedValues(alice).map(_.id), Set[RDFNode]()) - assertEquals(typing.getFailedValues(bob).map(_.id), Set[RDFNode](person,hasName)) - }) - } - -/* it(s"Validates a shape that imports another one with a loop") { - val r = for { - rdf <- RDFAsJenaModel.fromIRI(shaclFolder + "imports/importWithLoop.ttl") - schema <- RDF2Shacl.getShacl(rdf) - result <- Validator.validate(schema, rdf).leftMap(ar => s"AbstractResult: $ar") - } yield result - - r.fold( - e => fail(s"Error reading: $e"), - pair => { - val (typing, ok) = pair - val alice = IRI("http://example.org/alice") - val bob = IRI("http://example.org/bob") - val person = IRI("http://example.org/Person") - val hasName = IRI("http://example.org/hasName") - typing.getFailedValues(alice).map(_.id) should contain theSameElementsAs(List()) - typing.getFailedValues(bob).map(_.id) should contain theSameElementsAs(List(person,hasName)) - }) - } */ - } - +package es.weso.shacl + +import java.nio.file.Paths +import com.typesafe.config.{Config, ConfigFactory} +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.nodes._ +import es.weso.shacl.converter.RDF2Shacl +import es.weso.shacl.validator.Validator +import cats.effect._ +import munit._ + +class ImportTest extends CatsEffectSuite { + + val conf: Config = ConfigFactory.load() + val shaclFolderStr = conf.getString("shaclTests") + val shaclFolder = IRI(Paths.get(shaclFolderStr).normalize.toUri.toString) + "imports/" + + test("import") { + val r = + RDFAsJenaModel + .fromIRI(iri = shaclFolder + "import.ttl", format = "TURTLE", base = Some(shaclFolder)) + .flatMap( + _.use(rdf => + for { + // _ <- { println(s"RDF: ${rdf.serialize("TURTLE").getOrElse("")}"); Right(()) } + // extendedRdf <- rdf.extendImports() + // _ <- { println(s"Extended RDF: ${extendedRdf.serialize("TURTLE").getOrElse("")}"); Right(()) } + schema <- RDF2Shacl.getShacl(rdf) + // _ <- { 
println(s"----\nSchema: ${schema.serialize("TURTLE", None,RDFAsJenaModel.empty)}"); Right(()) } + eitherResult <- Validator.validate(schema, rdf) + result <- eitherResult.fold(s => IO.raiseError(new RuntimeException(s"Error validating: $s")), IO.pure(_)) + } yield result + ) + ) + + r.map(pair => { + val (typing, ok) = pair + val alice = IRI("http://example.org/alice") + val bob = IRI("http://example.org/bob") + val person = IRI("http://example.org/Person") + val hasName = IRI("http://example.org/hasName") + assertEquals(typing.getFailedValues(alice).map(_.id), Set[RDFNode]()) + assertEquals(typing.getFailedValues(bob).map(_.id), Set[RDFNode](person, hasName)) + }) + } + + /* it(s"Validates a shape that imports another one with a loop") { + val r = for { + rdf <- RDFAsJenaModel.fromIRI(shaclFolder + "imports/importWithLoop.ttl") + schema <- RDF2Shacl.getShacl(rdf) + result <- Validator.validate(schema, rdf).leftMap(ar => s"AbstractResult: $ar") + } yield result + + r.fold( + e => fail(s"Error reading: $e"), + pair => { + val (typing, ok) = pair + val alice = IRI("http://example.org/alice") + val bob = IRI("http://example.org/bob") + val person = IRI("http://example.org/Person") + val hasName = IRI("http://example.org/hasName") + typing.getFailedValues(alice).map(_.id) should contain theSameElementsAs(List()) + typing.getFailedValues(bob).map(_.id) should contain theSameElementsAs(List(person,hasName)) + }) + } */ +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/RDF2ShaclTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/RDF2ShaclTest.scala index 4ce2570..5bc2370 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/RDF2ShaclTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/RDF2ShaclTest.scala @@ -1,256 +1,261 @@ -package es.weso.shacl - -import es.weso.rdf.nodes._ -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.shacl.converter.RDF2Shacl -import es.weso.rdf.path._ -import cats.effect._ -import es.weso.utils.IOUtils -import 
munit.CatsEffectSuite - -class RDF2ShaclTest extends CatsEffectSuite { - - { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape . - |""".stripMargin - checkContainsShapes("get a shape", str, Set(ex + "S")) - } - - { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape . - |:T a sh:Shape . - |""".stripMargin - val s = ex + "S" - val t = ex + "T" - checkContainsShapes("should be able to get the list of shapes", str, Set(s, t)) - } -/* - it("should be able to get the list of target nodes") { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape; sh:targetNode :n1 . - |:T a sh:Shape . - |""".stripMargin - val s = ex + "S" - val n1 = ex + "n1" - val cmp: IO[Shape] = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - shape <- IOUtils.fromES(schema.shape(s)) - } yield shape)) - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Failed $e") - case Right(shape) => - shape.targetNodes should contain only n1 - } - } - - it("should be able to get the target node declarations") { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape; sh:targetNode :s1, :s2 . - |:T a sh:Shape; sh:targetNode :t1 . - |""".stripMargin - val S = ex + "S" - val T = ex + "T" - val s1 = ex + "s1" - val s2 = ex + "s2" - val t1 = ex + "t1" - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - } yield schema)) - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Failed $e") - case Right(schema) => - schema.targetNodeDeclarations should contain only ((s2, S), (s1, S), (t1, T)) - } - } - - it("should be able to get some property constraints") { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape; - | sh:property :prop . 
- | - | :prop sh:path :p; - | sh:nodeKind sh:IRI . - |""".stripMargin - val S = ex + "S" - val prop = ex + "prop" - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - shape <- IOUtils.fromES(schema.shape(S)) - } yield shape)) - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Failed $e") - case Right(shape) => - shape.propertyShapes should contain only RefNode(prop) - } - - } - - it("should be able to get a property constraint with cardinalities") { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape; - | sh:property :prop . - | - |:prop sh:path :p; - | sh:nodeKind sh:IRI; - | sh:minCount 1; - | sh:maxCount 1 - | . - |""".stripMargin - val S = ex + "S" - val p = ex + "p" - val prop = ex + "prop" - val cmp: IO[(Shape, Schema)] = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - shape <- IOUtils.fromES(schema.shape(S)) - } yield (shape, schema))) - - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Failed $e") - case Right((shape, schema)) => - shape.propertyShapes.length should be(1) - val sref = shape.propertyShapes.head - info(s"Shape ref: $sref") - schema.shapesMap.get(sref) match { - case Some(pc: PropertyShape) => - pc.id should be(prop) - pc.predicate should be(Some(p)) - pc.components should contain only (NodeKind(IRIKind), MinCount(1), MaxCount(1)) - case other => fail(s"Failed with $other") - } - } - } - - it("should be able to get the property constraint with minCount cardinality only") { - val ex = "http://example.org/" - val str = s"""|prefix : <$ex> - |prefix sh: - | - |:S a sh:Shape; - | sh:property :prop . - |:prop sh:path :p; - | sh:minCount 1 . 
- |""".stripMargin - val S = IRI(ex) + "S" - val p = IRI(ex) + "p" - val prop = IRI(ex) + "prop" - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - shape <- IOUtils.fromES(schema.shape(S)) - } yield (shape, schema))) - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Failed $e") - case Right((shape, schema)) => - shape.propertyShapes.length should be(1) - val sref = shape.propertyShapes.head - schema.shapesMap.get(sref) match { - case None => fail(s"Not found shape with ref $sref in $schema") - case Some(ps: PropertyShape) => - ps.id should be(prop) - ps.predicate should be(Some(p)) - ps.components should contain only MinCount(1) - case other => fail(s"Unexpected value $other") - } - } - } - - it("should be able to get a path") { - val ex = "http://example.org/" - val str = s"""|prefix : <$ex> - |prefix sh: - | - |:S a sh:Shape; - | sh:property :prop . - |:prop sh:path [ sh:inversePath :p ]; - | sh:minCount 1 . - |""".stripMargin - val S = IRI(ex) + "S" - val p = IRI(ex) + "p" - val prop = IRI(ex) + "prop" - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - shape <- IOUtils.fromES(schema.shape(S)) - } yield (shape, schema))) - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Error parsing $e") - case Right((shape, schema)) => - val ip = InversePath(PredicatePath(p)) - shape.propertyShapes.length should be(1) - val sref = shape.propertyShapes.head - schema.shapesMap.get(sref) match { - case Some(ps: PropertyShape) => - ps.id should be(prop) - ps.path should be(ip) - ps.components should contain only MinCount(1) - case other => fail(s"Unexpected value $other") - } - } - } - - it("should be able to get shape with minInclusive") { - val ex = IRI("http://example.org/") - val str = - """|@prefix : - |@prefix sh: - | - |:S a sh:Shape; - | sh:property :prop . - | - |:prop sh:path :p; - | sh:minInclusive 3 . 
- |""".stripMargin - val S = ex + "S" - val prop = ex + "prop" - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - shape <- IOUtils.fromES(schema.shape(S)) - propShape <- IOUtils.fromES(schema.shape(prop)) - } yield (shape, propShape))) - cmp.attempt.unsafeRunSync match { - case Left(e) => fail(s"Failed $e") - case Right((shape,propShape)) => - shape.propertyShapes should contain only RefNode(prop) - propShape.components should contain only MinInclusive(IntegerLiteral(3,"3")) - } - - } - -} */ - - def checkContainsShapes( - name: String, - shaclStr: String, - expected: Set[RDFNode])(implicit loc: munit.Location): Unit = { - test(s"checkContainsShapes: $name") { - RDFAsJenaModel.fromString(shaclStr, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - } yield assertEquals(schema.shapes.map(_.id).toSet, expected))) - } - } - -} \ No newline at end of file +package es.weso.shacl + +import es.weso.rdf.nodes._ +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.shacl.converter.RDF2Shacl +import es.weso.rdf.path._ +import cats.effect._ +import es.weso.utils.IOUtils +import munit.CatsEffectSuite + +class RDF2ShaclTest extends CatsEffectSuite { + + { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape . + |""".stripMargin + checkContainsShapes("get a shape", str, Set(ex + "S")) + } + + { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape . + |:T a sh:Shape . + |""".stripMargin + val s = ex + "S" + val t = ex + "T" + checkContainsShapes("should be able to get the list of shapes", str, Set(s, t)) + } + /* + it("should be able to get the list of target nodes") { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape; sh:targetNode :n1 . + |:T a sh:Shape . 
+ |""".stripMargin + val s = ex + "S" + val n1 = ex + "n1" + val cmp: IO[Shape] = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + shape <- IOUtils.fromES(schema.shape(s)) + } yield shape)) + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Failed $e") + case Right(shape) => + shape.targetNodes should contain only n1 + } + } + + it("should be able to get the target node declarations") { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape; sh:targetNode :s1, :s2 . + |:T a sh:Shape; sh:targetNode :t1 . + |""".stripMargin + val S = ex + "S" + val T = ex + "T" + val s1 = ex + "s1" + val s2 = ex + "s2" + val t1 = ex + "t1" + val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + } yield schema)) + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Failed $e") + case Right(schema) => + schema.targetNodeDeclarations should contain only ((s2, S), (s1, S), (t1, T)) + } + } + + it("should be able to get some property constraints") { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape; + | sh:property :prop . + | + | :prop sh:path :p; + | sh:nodeKind sh:IRI . + |""".stripMargin + val S = ex + "S" + val prop = ex + "prop" + val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + shape <- IOUtils.fromES(schema.shape(S)) + } yield shape)) + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Failed $e") + case Right(shape) => + shape.propertyShapes should contain only RefNode(prop) + } + + } + + it("should be able to get a property constraint with cardinalities") { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape; + | sh:property :prop . + | + |:prop sh:path :p; + | sh:nodeKind sh:IRI; + | sh:minCount 1; + | sh:maxCount 1 + | . 
+ |""".stripMargin + val S = ex + "S" + val p = ex + "p" + val prop = ex + "prop" + val cmp: IO[(Shape, Schema)] = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + shape <- IOUtils.fromES(schema.shape(S)) + } yield (shape, schema))) + + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Failed $e") + case Right((shape, schema)) => + shape.propertyShapes.length should be(1) + val sref = shape.propertyShapes.head + info(s"Shape ref: $sref") + schema.shapesMap.get(sref) match { + case Some(pc: PropertyShape) => + pc.id should be(prop) + pc.predicate should be(Some(p)) + pc.components should contain only (NodeKind(IRIKind), MinCount(1), MaxCount(1)) + case other => fail(s"Failed with $other") + } + } + } + + it("should be able to get the property constraint with minCount cardinality only") { + val ex = "http://example.org/" + val str = s"""|prefix : <$ex> + |prefix sh: + | + |:S a sh:Shape; + | sh:property :prop . + |:prop sh:path :p; + | sh:minCount 1 . + |""".stripMargin + val S = IRI(ex) + "S" + val p = IRI(ex) + "p" + val prop = IRI(ex) + "prop" + val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + shape <- IOUtils.fromES(schema.shape(S)) + } yield (shape, schema))) + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Failed $e") + case Right((shape, schema)) => + shape.propertyShapes.length should be(1) + val sref = shape.propertyShapes.head + schema.shapesMap.get(sref) match { + case None => fail(s"Not found shape with ref $sref in $schema") + case Some(ps: PropertyShape) => + ps.id should be(prop) + ps.predicate should be(Some(p)) + ps.components should contain only MinCount(1) + case other => fail(s"Unexpected value $other") + } + } + } + + it("should be able to get a path") { + val ex = "http://example.org/" + val str = s"""|prefix : <$ex> + |prefix sh: + | + |:S a sh:Shape; + | sh:property :prop . 
+ |:prop sh:path [ sh:inversePath :p ]; + | sh:minCount 1 . + |""".stripMargin + val S = IRI(ex) + "S" + val p = IRI(ex) + "p" + val prop = IRI(ex) + "prop" + val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + shape <- IOUtils.fromES(schema.shape(S)) + } yield (shape, schema))) + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Error parsing $e") + case Right((shape, schema)) => + val ip = InversePath(PredicatePath(p)) + shape.propertyShapes.length should be(1) + val sref = shape.propertyShapes.head + schema.shapesMap.get(sref) match { + case Some(ps: PropertyShape) => + ps.id should be(prop) + ps.path should be(ip) + ps.components should contain only MinCount(1) + case other => fail(s"Unexpected value $other") + } + } + } + + it("should be able to get shape with minInclusive") { + val ex = IRI("http://example.org/") + val str = + """|@prefix : + |@prefix sh: + | + |:S a sh:Shape; + | sh:property :prop . + | + |:prop sh:path :p; + | sh:minInclusive 3 . 
+ |""".stripMargin + val S = ex + "S" + val prop = ex + "prop" + val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { + schema <- RDF2Shacl.getShacl(rdf) + shape <- IOUtils.fromES(schema.shape(S)) + propShape <- IOUtils.fromES(schema.shape(prop)) + } yield (shape, propShape))) + cmp.attempt.unsafeRunSync match { + case Left(e) => fail(s"Failed $e") + case Right((shape,propShape)) => + shape.propertyShapes should contain only RefNode(prop) + propShape.components should contain only MinInclusive(IntegerLiteral(3,"3")) + } + + } + +} */ + + def checkContainsShapes(name: String, shaclStr: String, expected: Set[RDFNode])(implicit + loc: munit.Location + ): Unit = { + test(s"checkContainsShapes: $name") { + RDFAsJenaModel + .fromString(shaclStr, "TURTLE") + .flatMap( + _.use(rdf => + for { + schema <- RDF2Shacl.getShacl(rdf) + } yield assertEquals(schema.shapes.map(_.id).toSet, expected) + ) + ) + } + } + +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/ShapeValidatorTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/ShapeValidatorTest.scala index 1f54759..3626160 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/ShapeValidatorTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/ShapeValidatorTest.scala @@ -1,59 +1,59 @@ -package es.weso.shacl - -// import es.weso.rdf.nodes._ -import es.weso.rdf.jena.RDFAsJenaModel -import munit._ - -import util._ -import es.weso.shacl.converter.RDF2Shacl -import es.weso.shacl.validator.Validator -// import cats.data.EitherT -import cats.effect._ - -// import cats.implicits._ -// import es.weso.utils.IOUtils - -class ShapeValidatorTest extends CatsEffectSuite { - - test("Should validate single shape") { - /*val str = """|@prefix : - |@prefix sh: - |@prefix xsd: - | - |:S a sh:Shape; - | sh:targetNode :x; - | sh:property [sh:path :p; sh:datatype xsd:string; sh:minCount 1] ; - | sh:property [sh:path :q; sh:datatype xsd:integer; sh:minCount 1] . - |:x :p "23"; :q 33 . 
- |""".stripMargin */ - val str = """|prefix : - |prefix sh: - |prefix xsd: - | - |# Separation test reach1 - |:R a sh:NodeShape ; - | sh:targetNode :a ; - | sh:property - | [ sh:path :p ; - | sh:qualifiedValueShape :R ; - | sh:qualifiedMinCount 1 - | ] . - | - |:a :p :a . - |""".stripMargin - val cmp = RDFAsJenaModel - .fromString(str, "TURTLE") - .flatMap( - _.use(rdf => - for { - schema <- RDF2Shacl.getShacl(rdf) - eitherResult <- Validator.validate(schema, rdf) - _ <- IO.println(s"Either result: $eitherResult") - result <- eitherResult - .fold(err => IO.raiseError(new RuntimeException(s"Error validating: ${err.toString}")), IO.pure(_)) - } yield (result._2) - ) - ) - cmp.map(result => assertEquals(result, true)) - } -} +package es.weso.shacl + +// import es.weso.rdf.nodes._ +import es.weso.rdf.jena.RDFAsJenaModel +import munit._ + +import util._ +import es.weso.shacl.converter.RDF2Shacl +import es.weso.shacl.validator.Validator +// import cats.data.EitherT +import cats.effect._ + +// import cats.implicits._ +// import es.weso.utils.IOUtils + +class ShapeValidatorTest extends CatsEffectSuite { + + test("Should validate single shape") { + /*val str = """|@prefix : + |@prefix sh: + |@prefix xsd: + | + |:S a sh:Shape; + | sh:targetNode :x; + | sh:property [sh:path :p; sh:datatype xsd:string; sh:minCount 1] ; + | sh:property [sh:path :q; sh:datatype xsd:integer; sh:minCount 1] . + |:x :p "23"; :q 33 . + |""".stripMargin */ + val str = """|prefix : + |prefix sh: + |prefix xsd: + | + |# Separation test reach1 + |:R a sh:NodeShape ; + | sh:targetNode :a ; + | sh:property + | [ sh:path :p ; + | sh:qualifiedValueShape :R ; + | sh:qualifiedMinCount 1 + | ] . + | + |:a :p :a . 
+ |""".stripMargin + val cmp = RDFAsJenaModel + .fromString(str, "TURTLE") + .flatMap( + _.use(rdf => + for { + schema <- RDF2Shacl.getShacl(rdf) + eitherResult <- Validator.validate(schema, rdf) + _ <- IO.println(s"Either result: $eitherResult") + result <- eitherResult + .fold(err => IO.raiseError(new RuntimeException(s"Error validating: ${err.toString}")), IO.pure(_)) + } yield (result._2) + ) + ) + cmp.map(result => assertEquals(result, true)) + } +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/TypingTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/TypingTest.scala index 3d28a6f..2964267 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/TypingTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/TypingTest.scala @@ -5,22 +5,23 @@ import es.weso.typing._ class TypingTest extends FunSuite { - test("should be able to add evidences") { - type Evidence = String - type Error = String - type Node = String - type Shape = String + test("should be able to add evidences") { + type Evidence = String + type Error = String + type Node = String + type Shape = String - val t1: Typing[Node, Shape, Evidence, Error] = Typing.empty - val r = t1.addEvidence("x", "S", "x-S1"). - addEvidence("x", "S", "x-S2"). - addEvidence("x", "T", "x-T1"). - addEvidence("x", "T", "x-T2"). 
- addEvidence("y", "S", "y-S1") - val oksX = r.getOkValues("x") - assertEquals(oksX, Set("S", "T")) - val es = r.getEvidences("x", "S").get - assertEquals(es, List("x-S1", "x-S2")) - } + val t1: Typing[Node, Shape, Evidence, Error] = Typing.empty + val r = t1 + .addEvidence("x", "S", "x-S1") + .addEvidence("x", "S", "x-S2") + .addEvidence("x", "T", "x-T1") + .addEvidence("x", "T", "x-T2") + .addEvidence("y", "S", "y-S1") + val oksX = r.getOkValues("x") + assertEquals(oksX, Set("S", "T")) + val es = r.getEvidences("x", "S").get + assertEquals(es, List("x-S1", "x-S2")) + } } diff --git a/modules/shacl/src/test/scala/es/weso/shacl/ValidateFolderTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/ValidateFolderTest.scala index 22afe11..39802d8 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/ValidateFolderTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/ValidateFolderTest.scala @@ -1,51 +1,56 @@ -package es.weso.shacl - -import java.io.File -import com.typesafe.config.{Config, ConfigFactory} -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.shacl.converter.RDF2Shacl -import es.weso.shacl.validator.Validator -import es.weso.utils.FileUtils._ - -import scala.io.Source -import cats.effect._ -import cats.implicits._ -import munit.CatsEffectSuite - -class ValidateFolderTest extends CatsEffectSuite { - - val conf: Config = ConfigFactory.load() - val shaclFolder = conf.getString("shaclTests") - - lazy val ignoreFiles: List[String] = List() - - def getTtlFiles(schemasDir: String): IO[List[File]] = { - getFilesFromFolderWithExt(schemasDir, "ttl", ignoreFiles) - } - - { - val r = getTtlFiles(shaclFolder).map(files => - files.map(name => validate(name))).void - r.unsafeRunSync() - } - - def validate(name: File): Unit = test(name.getAbsolutePath()) { - val cmp = RDFAsJenaModel.fromFile(name, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - eitherResult <- Validator.validate(schema, rdf) - b <- eitherResult.fold( - err => 
IO.raiseError(new RuntimeException(s"Error: ${err}")), - result => { - val (typing,ok) = result - if (!ok) { - fail(s"Failed nodes: ${typing.t.getFailed}") - } else - typing.t.allOk.pure[IO] - } - ) - } yield b)) - - assertIO(cmp, true) - } - -} +package es.weso.shacl + +import java.io.File +import com.typesafe.config.{Config, ConfigFactory} +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.shacl.converter.RDF2Shacl +import es.weso.shacl.validator.Validator +import es.weso.utils.FileUtils._ + +import scala.io.Source +import cats.effect._ +import cats.implicits._ +import munit.CatsEffectSuite + +class ValidateFolderTest extends CatsEffectSuite { + + val conf: Config = ConfigFactory.load() + val shaclFolder = conf.getString("shaclTests") + + lazy val ignoreFiles: List[String] = List() + + def getTtlFiles(schemasDir: String): IO[List[File]] = { + getFilesFromFolderWithExt(schemasDir, "ttl", ignoreFiles) + } + + { + val r = getTtlFiles(shaclFolder).map(files => files.map(name => validate(name))).void + r.unsafeRunSync() + } + + def validate(name: File): Unit = test(name.getAbsolutePath()) { + val cmp = RDFAsJenaModel + .fromFile(name, "TURTLE") + .flatMap( + _.use(rdf => + for { + schema <- RDF2Shacl.getShacl(rdf) + eitherResult <- Validator.validate(schema, rdf) + b <- eitherResult.fold( + err => IO.raiseError(new RuntimeException(s"Error: ${err}")), + result => { + val (typing, ok) = result + if (!ok) { + fail(s"Failed nodes: ${typing.t.getFailed}") + } else + typing.t.allOk.pure[IO] + } + ) + } yield b + ) + ) + + assertIO(cmp, true) + } + +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/ValidateSingleTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/ValidateSingleTest.scala index 36e9f6f..72227ce 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/ValidateSingleTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/ValidateSingleTest.scala @@ -1,54 +1,54 @@ -package es.weso.shacl - -import cats.implicits._ -import 
com.typesafe.config.{Config, ConfigFactory} -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.shacl.converter.RDF2Shacl -import es.weso.shacl.validator.Validator -import es.weso.utils.FileUtils._ -import munit._ -import scala.io.Source -import scala.util._ -// import cats.data.EitherT -import cats.effect._ -import es.weso.utils.IOUtils2.either2io - -class ValidateSingleTest extends CatsEffectSuite { - - val name = "good7" - - val conf: Config = ConfigFactory.load() - val shaclFolder: String = conf.getString("shaclTests") - - lazy val ignoreFiles: List[String] = List() - - test(s"Validate single test $name, folder: $shaclFolder") { - val file = getFileFromFolderWithExt(shaclFolder, name, "ttl").unsafeRunSync() - val str = Source.fromFile(file)("UTF-8").mkString - println(s"File: $file") - validate(name, str) - } - - def validate(name: String, str: String): Unit = { - val cmp = for { - res1 <- RDFAsJenaModel.fromString(str, "TURTLE") - res2 <- RDFAsJenaModel.empty - vv <- (res1, res2).tupled.use { case (rdf, builder) => - for { - schema <- RDF2Shacl.getShacl(rdf) - _ <- IO.println(s"Schema: $schema") - eitherresult <- Validator.validate(schema, rdf) - result <- either2io(eitherresult) - (typing, ok) = result - report <- typing.toValidationReport.toRDF(builder) - strReport <- report.serialize("TURTLE") - _ <- - if (!ok) IO.println(s"Not valid, report:\n$strReport") - else ().pure[IO] - } yield ok - } - } yield vv - assertIO(cmp, true) - } - -} +package es.weso.shacl + +import cats.implicits._ +import com.typesafe.config.{Config, ConfigFactory} +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.shacl.converter.RDF2Shacl +import es.weso.shacl.validator.Validator +import es.weso.utils.FileUtils._ +import munit._ +import scala.io.Source +import scala.util._ +// import cats.data.EitherT +import cats.effect._ +import es.weso.utils.IOUtils2.either2io + +class ValidateSingleTest extends CatsEffectSuite { + + val name = "good7" + + val conf: Config = 
ConfigFactory.load() + val shaclFolder: String = conf.getString("shaclTests") + + lazy val ignoreFiles: List[String] = List() + + test(s"Validate single test $name, folder: $shaclFolder") { + val file = getFileFromFolderWithExt(shaclFolder, name, "ttl").unsafeRunSync() + val str = Source.fromFile(file)("UTF-8").mkString + println(s"File: $file") + validate(name, str) + } + + def validate(name: String, str: String): Unit = { + val cmp = for { + res1 <- RDFAsJenaModel.fromString(str, "TURTLE") + res2 <- RDFAsJenaModel.empty + vv <- (res1, res2).tupled.use { case (rdf, builder) => + for { + schema <- RDF2Shacl.getShacl(rdf) + _ <- IO.println(s"Schema: $schema") + eitherresult <- Validator.validate(schema, rdf) + result <- either2io(eitherresult) + (typing, ok) = result + report <- typing.toValidationReport.toRDF(builder) + strReport <- report.serialize("TURTLE") + _ <- + if (!ok) IO.println(s"Not valid, report:\n$strReport") + else ().pure[IO] + } yield ok + } + } yield vv + assertIO(cmp, true) + } + +} diff --git a/modules/shacl/src/test/scala/es/weso/shacl/ValidatorTest.scala b/modules/shacl/src/test/scala/es/weso/shacl/ValidatorTest.scala index c680e0e..4947326 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/ValidatorTest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/ValidatorTest.scala @@ -9,33 +9,38 @@ import es.weso.shacl.converter.RDF2Shacl import es.weso.shacl.validator.Validator import munit.CatsEffectSuite - class ValidatorTest extends CatsEffectSuite { - test("should be able to obtain the target nodes to validate") { - val ex = IRI("http://example.org/") - val str = """|@prefix : + test("should be able to obtain the target nodes to validate") { + val ex = IRI("http://example.org/") + val str = """|@prefix : |@prefix sh: | |:S a sh:Shape; sh:targetNode :x, :y . |:T a sh:Shape; sh:targetNode :z . 
|""".stripMargin - val S = ex + "S" - val T = ex + "T" - val x = ex + "x" - val y = ex + "y" - val z = ex + "z" - val expected = List((y, S), (x, S),(z, T)) - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - } yield Validator(schema).targetNodes.map { case (node, shape) => (node, shape.id)})) - assertIO(cmp, expected) - } + val S = ex + "S" + val T = ex + "T" + val x = ex + "x" + val y = ex + "y" + val z = ex + "z" + val expected = List((y, S), (x, S), (z, T)) + val cmp = RDFAsJenaModel + .fromString(str, "TURTLE") + .flatMap( + _.use(rdf => + for { + schema <- RDF2Shacl.getShacl(rdf) + } yield Validator(schema).targetNodes.map { case (node, shape) => (node, shape.id) } + ) + ) + assertIO(cmp, expected) + } - test("should be able to validate minCount") { - val ex = IRI("http://example.org/") - val str = """|@prefix : + test("should be able to validate minCount") { + val ex = IRI("http://example.org/") + val str = """|@prefix : |@prefix sh: | |:S a sh:NodeShape; @@ -50,42 +55,48 @@ class ValidatorTest extends CatsEffectSuite { |:good2 :p 1, 2 . |:bad1 :q 1 . 
|""".stripMargin - val S = ex + "S" - val PS = ex + "PS" - val x = ex + "x" - // val good1 = ex + "good1" - // val good2 = ex + "good2" - // val bad1 = ex + "bad1" - // val ps = Shape.emptyPropertyShape(PS, PredicatePath(p)).copy(components = List(MinCount(1))) - val psRefs = Seq(RefNode(PS)) - val s = Shape.empty(S).copy( - targets = Seq(TargetNode(x)), - propertyShapes = psRefs) - val cmp = RDFAsJenaModel.fromString(str, "TURTLE").flatMap(_.use(rdf => for { - schema <- RDF2Shacl.getShacl(rdf) - validator = Validator(schema) - checked <- validator.validateAll(rdf) - _ <- assertEquals(validator.targetNodes, List((x,s))).pure[IO] - _ <- assertEquals(checked.isOK,true).pure[IO] - } yield (rdf, schema, validator,checked))) + val S = ex + "S" + val PS = ex + "PS" + val x = ex + "x" + // val good1 = ex + "good1" + // val good2 = ex + "good2" + // val bad1 = ex + "bad1" + // val ps = Shape.emptyPropertyShape(PS, PredicatePath(p)).copy(components = List(MinCount(1))) + val psRefs = Seq(RefNode(PS)) + val s = Shape.empty(S).copy(targets = Seq(TargetNode(x)), propertyShapes = psRefs) + val cmp = RDFAsJenaModel + .fromString(str, "TURTLE") + .flatMap( + _.use(rdf => + for { + schema <- RDF2Shacl.getShacl(rdf) + validator = Validator(schema) + checked <- validator.validateAll(rdf) + _ <- assertEquals(validator.targetNodes, List((x, s))).pure[IO] + _ <- assertEquals(checked.isOK, true).pure[IO] + } yield (rdf, schema, validator, checked) + ) + ) } - test("minCount - validates minCount(1) when there is exactly 1") { - val ex = IRI("http://example.org/") - val str = s"""|@prefix : $ex + val ex = IRI("http://example.org/") + val str = s"""|@prefix : $ex |:x :p 1 . 
|""".stripMargin - val cmp = RDFAsJenaModel.fromChars(str, "TURTLE").flatMap(_.use(rdf => { + val cmp = RDFAsJenaModel + .fromChars(str, "TURTLE") + .flatMap(_.use(rdf => { val validator = Validator(Schema.empty) for { - checked <- validator.validateAll(rdf) - } yield checked.isOK})) + checked <- validator.validateAll(rdf) + } yield checked.isOK + })) - assertIO(cmp, true) + assertIO(cmp, true) } - /* + /* it("validates minCount(1) when there are 2") { val ex = IRI("http://example.org/") val str = s"""|@prefix : $ex @@ -144,7 +155,7 @@ class ValidatorTest extends CatsEffectSuite { val validator = Validator(Schema.empty) validator.minCount(2).validateAll(x,(rdf,p)).isOK should be(false) } -*/ + */ /* describe("Property constraint"){ it("validates minCount(1), maxCount(1) when there is exactly 1") { diff --git a/modules/shacl/src/test/scala/es/weso/shacl/manifest/Manifest.scala b/modules/shacl/src/test/scala/es/weso/shacl/manifest/Manifest.scala index d5c433f..8ae4ea4 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/manifest/Manifest.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/manifest/Manifest.scala @@ -4,23 +4,25 @@ import es.weso.rdf.nodes._ import ManifestPrefixes._ case class Manifest( - label: Option[String], - comment: Option[String], - entries: List[Entry], - includes: List[(RDFNode, Option[Manifest])]) + label: Option[String], + comment: Option[String], + entries: List[Entry], + includes: List[(RDFNode, Option[Manifest])] +) object Manifest { def empty: Manifest = Manifest(None, None, List(), List()) } case class Entry( - node: RDFNode, - entryType: EntryType, - name: Option[String], - action: ManifestAction, - result: Result, - status: Status, - specRef: Option[IRI]) + node: RDFNode, + entryType: EntryType, + name: Option[String], + action: ManifestAction, + result: Result, + status: Status, + specRef: Option[IRI] +) sealed trait EntryType { def iri: IRI @@ -45,14 +47,15 @@ final case object ConvertSchemaSyntax extends EntryType { } case 
class ManifestAction( - schema: Option[IRI], - schemaFormat: Option[String], - data: Option[IRI], - dataFormat: Option[String], - schemaOutputFormat: Option[IRI], - triggerMode: Option[IRI], - node: Option[IRI], - shape: Option[IRI]) { + schema: Option[IRI], + schemaFormat: Option[String], + data: Option[IRI], + dataFormat: Option[String], + schemaOutputFormat: Option[IRI], + triggerMode: Option[IRI], + node: Option[IRI], + shape: Option[IRI] +) { def setSchema(iri: IRI): ManifestAction = { this.copy(schema = Some(iri)) } @@ -73,7 +76,8 @@ object ManifestAction { schemaOutputFormat = None, triggerMode = None, node = None, - shape = None) + shape = None + ) } } @@ -82,7 +86,7 @@ sealed trait Result { def asBoolean: Option[Boolean] = { this match { case BooleanResult(b) => Some(b) - case _ => None + case _ => None } } @@ -93,22 +97,17 @@ final case class ReportResult(report: ValidationReport) extends Result { override val isValid = false } -case class ValidPair( - node: RDFNode, - shape: RDFNode) +case class ValidPair(node: RDFNode, shape: RDFNode) -final case class BooleanResult( - value: Boolean) extends Result { +final case class BooleanResult(value: Boolean) extends Result { override val isValid = value } -final case class IRIResult( - value: IRI) extends Result { +final case class IRIResult(value: IRI) extends Result { override val isValid = false } -final case object EmptyResult - extends Result { +final case object EmptyResult extends Result { override val isValid = true } @@ -116,21 +115,21 @@ final case class ValidationReport(violationErrors: Set[ViolationError]) { def failingNodes: Set[RDFNode] = violationErrors.map(_.focusNode).flatten - def failingNodesShapes: List[(RDFNode,IRI)] = + def failingNodesShapes: List[(RDFNode, IRI)] = violationErrors.toList.collect { case v if v.focusNode.isDefined && v.sourceShape.isDefined => - (v.focusNode.get,v.sourceShape.get) + (v.focusNode.get, v.sourceShape.get) } } final case class ViolationError( - errorType: 
Option[IRI], - focusNode: Option[RDFNode], - path: Option[IRI], - severity: Option[IRI], - sourceConstraintComponent: Option[IRI], - sourceShape: Option[IRI], - value: Option[RDFNode]) + errorType: Option[IRI], + focusNode: Option[RDFNode], + path: Option[IRI], + severity: Option[IRI], + sourceConstraintComponent: Option[IRI], + sourceShape: Option[IRI], + value: Option[RDFNode] +) final case class Status(value: IRI) - diff --git a/modules/shacl/src/test/scala/es/weso/shacl/manifest/ManifestPrefixes.scala b/modules/shacl/src/test/scala/es/weso/shacl/manifest/ManifestPrefixes.scala index c540fa8..fafa444 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/manifest/ManifestPrefixes.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/manifest/ManifestPrefixes.scala @@ -4,64 +4,64 @@ import es.weso.rdf.nodes.IRI object ManifestPrefixes { - lazy val mf = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#") - lazy val qt = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-query#") - lazy val sht = IRI("http://www.w3.org/ns/shacl-test#") - lazy val dc = IRI("http://purl.org/dc/elements/1.1/") + lazy val mf = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#") + lazy val qt = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-query#") + lazy val sht = IRI("http://www.w3.org/ns/shacl-test#") + lazy val dc = IRI("http://purl.org/dc/elements/1.1/") lazy val dawgt = IRI("http://www.w3.org/2001/sw/DataAccess/tests/test-dawg#") - lazy val rdfs = IRI("http://www.w3.org/2000/01/rdf-schema#") - lazy val rdf = IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#") - lazy val sh = IRI("http://www.w3.org/ns/shacl#") + lazy val rdfs = IRI("http://www.w3.org/2000/01/rdf-schema#") + lazy val rdf = IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#") + lazy val sh = IRI("http://www.w3.org/ns/shacl#") lazy val mf_Manifest = mf.add("Manifest") - lazy val mf_entries = mf.add("entries") - lazy val mf_include = mf.add("include") - lazy val mf_name = 
mf.add("name") - lazy val mf_action = mf.add("action") - lazy val mf_result = mf.add("result") - lazy val mf_status = mf.add("status") + lazy val mf_entries = mf.add("entries") + lazy val mf_include = mf.add("include") + lazy val mf_name = mf.add("name") + lazy val mf_action = mf.add("action") + lazy val mf_result = mf.add("result") + lazy val mf_status = mf.add("status") lazy val rdf_type = rdf + "type" - lazy val sh_focusNode = sh + "focusNode" - lazy val sh_path = sh + "path" - lazy val sh_severity = sh + "severity" + lazy val sh_focusNode = sh + "focusNode" + lazy val sh_path = sh + "path" + lazy val sh_severity = sh + "severity" lazy val sh_sourceConstraintComponent = sh + "sourceConstraintComponent" - lazy val sh_sourceShape = sh + "sourceShape" - lazy val sh_value = sh + "value" - lazy val sh_result = sh + "result" - lazy val sh_ValidationReport = sh + "ValidationReport" + lazy val sh_sourceShape = sh + "sourceShape" + lazy val sh_value = sh + "value" + lazy val sh_result = sh + "result" + lazy val sh_ValidationReport = sh + "ValidationReport" - lazy val rdfs_label = rdfs.add("label") + lazy val rdfs_label = rdfs.add("label") lazy val rdfs_comment = rdfs.add("comment") - lazy val sht_proposed = sht.add("proposed") - lazy val sht_approved = sht.add("approved") - lazy val sht_rejected = sht.add("rejected") - lazy val sht_specRef = sht.add("specRef") - lazy val sht_shapesGraph = sht.add("shapesGraph") - lazy val sht_schema_format = sht.add("schema-format") - lazy val sht_dataGraph = sht.add("dataGraph") - lazy val sht_data_format = sht.add("data-format") + lazy val sht_proposed = sht.add("proposed") + lazy val sht_approved = sht.add("approved") + lazy val sht_rejected = sht.add("rejected") + lazy val sht_specRef = sht.add("specRef") + lazy val sht_shapesGraph = sht.add("shapesGraph") + lazy val sht_schema_format = sht.add("schema-format") + lazy val sht_dataGraph = sht.add("dataGraph") + lazy val sht_data_format = sht.add("data-format") lazy val 
sht_schema_output_format = sht.add("schema-output-format") - lazy val sht_node = sht.add("node") - lazy val sht_focus = sht.add("focus") - lazy val sht_triggerMode = sht.add("triggerMode") - lazy val sht_shape = sht.add("shape") - lazy val sht_details = sht + "details" - lazy val sht_pair = sht + "pair" - lazy val sht_validatedPairs = sht + "validatedPairs" - lazy val sht_Valid = sht + "Valid" - lazy val sht_NotValid = sht + "NotValid" - lazy val sht_Validate = sht.add("Validate") - lazy val sht_ValidationTest = sht.add("ValidationTest") - lazy val sht_ValidationFailure = sht.add("ValidationFailure") + lazy val sht_node = sht.add("node") + lazy val sht_focus = sht.add("focus") + lazy val sht_triggerMode = sht.add("triggerMode") + lazy val sht_shape = sht.add("shape") + lazy val sht_details = sht + "details" + lazy val sht_pair = sht + "pair" + lazy val sht_validatedPairs = sht + "validatedPairs" + lazy val sht_Valid = sht + "Valid" + lazy val sht_NotValid = sht + "NotValid" + lazy val sht_Validate = sht.add("Validate") + lazy val sht_ValidationTest = sht.add("ValidationTest") + lazy val sht_ValidationFailure = sht.add("ValidationFailure") - lazy val sht_WellFormedSchema = sht.add("WellFormedSchema") + lazy val sht_WellFormedSchema = sht.add("WellFormedSchema") lazy val sht_NonWellFormedSchema = sht.add("NonWellFormedSchema") - lazy val sht_MatchNodeShape = sht.add("MatchNodeShape") + lazy val sht_MatchNodeShape = sht.add("MatchNodeShape") lazy val sht_ConvertSchemaSyntax = sht.add("ConvertSchemaSyntax") - lazy val sht_SHACLC = sht.add("SHACLC") - lazy val sht_TURTLE = sht.add("TURTLE") + lazy val sht_SHACLC = sht.add("SHACLC") + lazy val sht_TURTLE = sht.add("TURTLE") } diff --git a/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2Manifest.scala b/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2Manifest.scala index 75d8ea7..732bfe9 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2Manifest.scala +++ 
b/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2Manifest.scala @@ -27,33 +27,31 @@ import java.nio.file.Path // case class RDF2ManifestException(v: String) extends RuntimeException(v) -case class RDF2Manifest(base: Option[IRI], - derefIncludes: Boolean) extends RDFParser with LazyLogging { +case class RDF2Manifest(base: Option[IRI], derefIncludes: Boolean) extends RDFParser with LazyLogging { - - def transf: FunctionK[IO, RDFParser] = new FunctionK[IO,RDFParser] { + def transf: FunctionK[IO, RDFParser] = new FunctionK[IO, RDFParser] { def apply[A](io: IO[A]): RDFParser[A] = liftIO(io) } - def cnvResource[A](r: Resource[IO,A]): Resource[RDFParser,A] = r.mapK(transf) - - def fromEitherS[A](e: Either[String,A]): RDFParser[A] = { + def cnvResource[A](r: Resource[IO, A]): Resource[RDFParser, A] = r.mapK(transf) + + def fromEitherS[A](e: Either[String, A]): RDFParser[A] = { fromEither(e.leftMap(RDF2ManifestException)) } def rdf2Manifest(rdf: RDFReader, visited: List[RDFNode] = List()): RDFParser[Manifest] = for { - mfs <- rdf2Manifests(rdf,visited) - mf <- fromEitherS(takeSingle(mfs,"Number of manifests != 1")) + mfs <- rdf2Manifests(rdf, visited) + mf <- fromEitherS(takeSingle(mfs, "Number of manifests != 1")) // parseNodes(candidates.toList, manifest(List()))(rdf) } yield mf def rdf2Manifests(rdf: RDFReader, visited: List[RDFNode] = List()): RDFParser[List[Manifest]] = for { candidates <- fromRDFStream(rdf.subjectsWithType(mf_Manifest)) - ns <- parseNodes(candidates.toList, manifest(List())) + ns <- parseNodes(candidates.toList, manifest(List())) } yield ns - def manifest(visited: List[IRI]): RDFParser[Manifest] = + def manifest(visited: List[IRI]): RDFParser[Manifest] = for { maybeLabel <- stringFromPredicateOptional(rdfs_label) maybeComment <- stringFromPredicateOptional(rdfs_comment) @@ -79,24 +77,25 @@ case class RDF2Manifest(base: Option[IRI], } def entry: RDFParser[Entry] = for { - n <- getNode - entryTypeUri <- rdfType - entryType <- 
fromEitherS(getEntryType(entryTypeUri)) - maybeName <- stringFromPredicateOptional(mf_name) - actionNode <- objectFromPredicate(mf_action) - action <- withNode(actionNode, action) - resultNode <- objectFromPredicate(mf_result) - result <- withNode(resultNode, result) - statusIri <- iriFromPredicate(mf_status) - specRef <- optional(iriFromPredicate(sht_specRef)) - } yield - Entry(node = n, - entryType = entryType, - name = maybeName, - action = action, - result = result, - status = Status(statusIri), - specRef = specRef) + n <- getNode + entryTypeUri <- rdfType + entryType <- fromEitherS(getEntryType(entryTypeUri)) + maybeName <- stringFromPredicateOptional(mf_name) + actionNode <- objectFromPredicate(mf_action) + action <- withNode(actionNode, action) + resultNode <- objectFromPredicate(mf_result) + result <- withNode(resultNode, result) + statusIri <- iriFromPredicate(mf_status) + specRef <- optional(iriFromPredicate(sht_specRef)) + } yield Entry( + node = n, + entryType = entryType, + name = maybeName, + action = action, + result = result, + status = Status(statusIri), + specRef = specRef + ) def iriDataFormat2str(iri: IRI): Either[String, String] = { iri match { @@ -113,10 +112,10 @@ case class RDF2Manifest(base: Option[IRI], } } - private def action: RDFParser[ManifestAction] = + private def action: RDFParser[ManifestAction] = for { - data <- optional(iriFromPredicate(sht_dataGraph)) - schema <- iriFromPredicateOptional(sht_shapesGraph) + data <- optional(iriFromPredicate(sht_dataGraph)) + schema <- iriFromPredicateOptional(sht_shapesGraph) dataFormatIri <- optional(iriFromPredicate(sht_data_format)) dataFormat <- mapOptional(dataFormatIri, iriDataFormat2str) schemaFormatIRI <- optional(iriFromPredicate(sht_schema_format)) @@ -125,107 +124,111 @@ case class RDF2Manifest(base: Option[IRI], triggerMode <- optional(iriFromPredicate(sht_triggerMode)) node <- optional(oneOfPredicates(Seq(sht_node, sht_focus))) shape <- optional(iriFromPredicate(sht_shape)) - } yield 
- ManifestAction( - schema = schema, - schemaFormat = schemaFormat, - data = data, - dataFormat = dataFormat, - triggerMode = triggerMode, - schemaOutputFormat = schemaOutputFormat, - node = node, - shape = shape - ) + } yield ManifestAction( + schema = schema, + schemaFormat = schemaFormat, + data = data, + dataFormat = dataFormat, + triggerMode = triggerMode, + schemaOutputFormat = schemaOutputFormat, + node = node, + shape = shape + ) - private def result: RDFParser[Result] = for { + private def result: RDFParser[Result] = for { n <- getNode v <- n match { - case BooleanLiteral(b) => ok(BooleanResult(b)) - case iri: IRI => - for { - b <- noType - v <- if (b) { - val r: RDFParser[Result] = ok(IRIResult(iri)) - r - } - else compoundResult - } yield v - case bNode: BNode => compoundResult - case _ => parseFail("Unexpected type of result " + n) - } - } yield v - + case BooleanLiteral(b) => ok(BooleanResult(b)) + case iri: IRI => + for { + b <- noType + v <- + if (b) { + val r: RDFParser[Result] = ok(IRIResult(iri)) + r + } else compoundResult + } yield v + case bNode: BNode => compoundResult + case _ => parseFail("Unexpected type of result " + n) + } + } yield v private def compoundResult: RDFParser[Result] = for { - n <- getNode + n <- getNode maybeType <- optional(iriFromPredicate(rdf_type)) v <- maybeType match { case None => parseFail(s"compoundResult. No rdf:type for node: $n") - case Some(`sh_ValidationReport`) => for { - report <- validationReport - } yield ReportResult(report) + case Some(`sh_ValidationReport`) => + for { + report <- validationReport + } yield ReportResult(report) case Some(other) => parseFail(s"compoundResult. 
rdf:type for node $n should be ${`sh_ValidationReport`}") } } yield v - + private def validationReport: RDFParser[ValidationReport] = parsePropertyValues(sh_result, violationError).map(ValidationReport(_)) private def violationError: RDFParser[ViolationError] = for { - errorType <- optional(iriFromPredicate(rdf_type)) - focusNode <- optional(objectFromPredicate(sh_focusNode)) - path <- optional(iriFromPredicate(sh_path)) - severity <- optional(iriFromPredicate(sh_severity)) - scc <- optional(iriFromPredicate(sh_sourceConstraintComponent)) - sourceShape <- optional(iriFromPredicate(sh_sourceShape)) - value <- optional(objectFromPredicate(sh_value)) - } yield { - ViolationError(errorType, focusNode, path, severity, scc, sourceShape, value) + errorType <- optional(iriFromPredicate(rdf_type)) + focusNode <- optional(objectFromPredicate(sh_focusNode)) + path <- optional(iriFromPredicate(sh_path)) + severity <- optional(iriFromPredicate(sh_severity)) + scc <- optional(iriFromPredicate(sh_sourceConstraintComponent)) + sourceShape <- optional(iriFromPredicate(sh_sourceShape)) + value <- optional(objectFromPredicate(sh_value)) + } yield { + ViolationError(errorType, focusNode, path, severity, scc, sourceShape, value) } private def noType: RDFParser[Boolean] = for { types <- objectsFromPredicate(rdf_type) } yield types.isEmpty - private def includes(visited: List[RDFNode]): RDFParser[List[(RDFNode, Option[Manifest])]] = - for { - includes <- objectsFromPredicate(mf_include) - result <- { - val ds: List[RDFParser[(IRI, Option[Manifest])]] = - includes.toList.map(iri => derefInclude(iri, base, iri +: visited)) - ds.sequence - } - } yield result + private def includes(visited: List[RDFNode]): RDFParser[List[(RDFNode, Option[Manifest])]] = + for { + includes <- objectsFromPredicate(mf_include) + result <- { + val ds: List[RDFParser[(IRI, Option[Manifest])]] = + includes.toList.map(iri => derefInclude(iri, base, iri +: visited)) + ds.sequence + } + } yield result /* TODO: The 
following code doesn't take into account possible loops */ - private def derefInclude(node: RDFNode, - base: Option[IRI], - visited: List[RDFNode]): RDFParser[(IRI, Option[Manifest])] = node match { + private def derefInclude( + node: RDFNode, + base: Option[IRI], + visited: List[RDFNode] + ): RDFParser[(IRI, Option[Manifest])] = node match { case iri: IRI => if (derefIncludes) { val iriResolved = base.fold(iri)(base => base.resolve(iri)) - liftIO(RDFAsJenaModel.fromURI(iriResolved.getLexicalForm, "TURTLE", Some(iriResolved))).flatMap(res => - cnvResource(res).use(rdf => for { - manifest <- RDF2Manifest(Some(iriResolved), true).rdf2Manifest(rdf, iri +: visited) - //manifest <- if (mfs.size == 1) ok(mfs.head) - // else parseFail(s"More than one manifests found: ${mfs} at iri $iri") - } yield (iri, Some(manifest)))) + liftIO(RDFAsJenaModel.fromURI(iriResolved.getLexicalForm, "TURTLE", Some(iriResolved))).flatMap(res => + cnvResource(res).use(rdf => + for { + manifest <- RDF2Manifest(Some(iriResolved), true).rdf2Manifest(rdf, iri +: visited) + // manifest <- if (mfs.size == 1) ok(mfs.head) + // else parseFail(s"More than one manifests found: ${mfs} at iri $iri") + } yield (iri, Some(manifest)) + ) + ) } else ok((iri, None)) - case _ => - parseFail(s"Trying to deref an include from node $node which is not an IRI") + case _ => + parseFail(s"Trying to deref an include from node $node which is not an IRI") } private def parsePropertyValues[A](pred: IRI, parser: RDFParser[A]): RDFParser[Set[A]] = for { - values <- objectsFromPredicate(pred) - results <- parseNodes(values.toList, parser) - } yield results.toSet + values <- objectsFromPredicate(pred) + results <- parseNodes(values.toList, parser) + } yield results.toSet private def parsePropertyList[A](pred: IRI, parser: RDFParser[A]): RDFParser[List[A]] = for { - ls <- rdfListForPredicateAllowingNone(pred) - vs <- parseNodes(ls, parser) + ls <- rdfListForPredicateAllowingNone(pred) + vs <- parseNodes(ls, parser) } yield vs 
private def mapOptional[A, B](optA: Option[A], fn: A => Either[String, B]): RDFParser[Option[B]] = { @@ -242,18 +245,18 @@ case class RDF2Manifest(base: Option[IRI], oneOf(ps) } - /** - * Override this method to provide more info - */ - override def objectFromPredicate(p: IRI): RDFParser[RDFNode] = + /** Override this method to provide more info + */ + override def objectFromPredicate(p: IRI): RDFParser[RDFNode] = for { rdf <- getRDF - n <- getNode - ts <- fromRDFStream(rdf.triplesWithSubjectPredicate(n, p)) + n <- getNode + ts <- fromRDFStream(rdf.triplesWithSubjectPredicate(n, p)) r <- ts.size match { case 0 => parseFail( - s"objectFromPredicate: Not found triples with subject $n and predicate $p \nRDF: ${rdf.serialize("TURTLE")}") + s"objectFromPredicate: Not found triples with subject $n and predicate $p \nRDF: ${rdf.serialize("TURTLE")}" + ) case 1 => parseOk(ts.head.obj) case _ => parseFail("objectFromPredicate: More than one value from predicate " + p + " on node " + n) } @@ -262,33 +265,33 @@ case class RDF2Manifest(base: Option[IRI], object RDF2Manifest extends LazyLogging { - def read(path: Path, - format: String, - base: Option[String], - derefIncludes: Boolean - ): IO[Resource[IO,Manifest]] = { + def read(path: Path, format: String, base: Option[String], derefIncludes: Boolean): IO[Resource[IO, Manifest]] = { for { cs <- FileUtils.getContents(path) iriBase <- base match { - case None => None.pure[IO] - case Some(str) => IO.fromEither(IRI.fromString(str).leftMap(s => new RuntimeException(s))).map(Some(_)) + case None => None.pure[IO] + case Some(str) => IO.fromEither(IRI.fromString(str).leftMap(s => new RuntimeException(s))).map(Some(_)) } - resRdf <- RDFAsJenaModel.fromString(cs.toString, format, iriBase) - manifest <- IO(resRdf.evalMap(rdf => fromRDF(rdf,iriBase,derefIncludes))) - } yield manifest + resRdf <- RDFAsJenaModel.fromString(cs.toString, format, iriBase) + manifest <- IO(resRdf.evalMap(rdf => fromRDF(rdf, iriBase, derefIncludes))) + } yield 
manifest } def fromRDF(rdf: RDFReader, base: Option[IRI], derefIncludes: Boolean): IO[Manifest] = { val cfg = Config(IRI("http://internal/base"), rdf) - val x = RDF2Manifest(base,derefIncludes).rdf2Manifest(rdf) + val x = RDF2Manifest(base, derefIncludes).rdf2Manifest(rdf) // EitherT(x.value.run(cfg).map(_.leftMap(_.toString))) - x.value.run(cfg).flatMap(e => e.fold( - err => IO.raiseError(err), - IO(_) - )) + x.value + .run(cfg) + .flatMap(e => + e.fold( + err => IO.raiseError(err), + IO(_) + ) + ) } - // TODO: Move to common tools + // TODO: Move to common tools /* private def getContents(fileName: String): IO[CharSequence] = { val path = Paths.get(fileName) implicit val cs = IO.contextShift(ExecutionContext.global) diff --git a/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2ManifestException.scala b/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2ManifestException.scala index 7632b49..013d8b7 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2ManifestException.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/manifest/RDF2ManifestException.scala @@ -1,4 +1,3 @@ package es.weso.shacl.manifest -case class RDF2ManifestException(msg: String) - extends RuntimeException(msg) +case class RDF2ManifestException(msg: String) extends RuntimeException(msg) diff --git a/modules/shacl/src/test/scala/es/weso/shacl/report/Report.scala b/modules/shacl/src/test/scala/es/weso/shacl/report/Report.scala index 1dd93dd..d46b8f6 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/report/Report.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/report/Report.scala @@ -6,20 +6,14 @@ import java.util.Calendar import org.apache.jena.datatypes.xsd.{XSDDatatype, XSDDateTime} import org.apache.jena.rdf.model.{Model, ModelFactory} -case class Report( - var items: List[SingleTestReport]) { +case class Report(var items: List[SingleTestReport]) { def addTestReport(r: SingleTestReport): Report = { items = r :: items this } - def addTestReport( - 
passed: Boolean, - name: String, - uriTest: String, - testType: String, - msg: String): Report = { + def addTestReport(passed: Boolean, name: String, uriTest: String, testType: String, msg: String): Report = { items = SingleTestReport(passed, name, uriTest, testType, msg) :: items this } @@ -34,14 +28,14 @@ case class Report( val sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss") - val foaf = "http://xmlns.com/foaf/0.1/" - val doap = "http://usefulinc.com/ns/doap#" + val foaf = "http://xmlns.com/foaf/0.1/" + val doap = "http://usefulinc.com/ns/doap#" val shaclexURL = "https://github.com/labra/shaclex/" - val shaclURL = "https://www.w3.org/TR/shacl/" - val rdf = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" - val earl = "http://www.w3.org/ns/earl#" - val dc = "http://purl.org/dc/terms/" - val rdfs = "http://www.w3.org/2000/01/rdf-schema#" + val shaclURL = "https://www.w3.org/TR/shacl/" + val rdf = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + val earl = "http://www.w3.org/ns/earl#" + val dc = "http://purl.org/dc/terms/" + val rdfs = "http://www.w3.org/2000/01/rdf-schema#" // val xsd = "http://www.w3.org/2001/XMLSchema#" // val shaclTests = new java.net.URI("urn:x-shacl-test") @@ -54,52 +48,52 @@ case class Report( val rdf_type = model.createProperty(rdf + "type") - val foaf_name = model.createProperty(foaf + "name") - val foaf_homepage = model.createProperty(foaf + "homepage") + val foaf_name = model.createProperty(foaf + "name") + val foaf_homepage = model.createProperty(foaf + "homepage") val foaf_primaryTopic = model.createProperty(foaf + "primaryTopic") - val foaf_maker = model.createProperty(foaf + "maker") + val foaf_maker = model.createProperty(foaf + "maker") val doapProject = model.createResource(doap + "Project") val doapVersion = model.createResource(doap + "Version") val doap_name = model.createProperty(doap + "name") // val doap_license = model.createProperty(doap + "license") - val doap_developer = model.createProperty(doap + "developer") - val 
doap_maintainer = model.createProperty(doap + "maintainer") - val doap_documenter = model.createProperty(doap + "documenter") - val doap_maker = model.createProperty(doap + "maker") - val doap_homePage = model.createProperty(doap + "homepage") - val doap_implements = model.createProperty(doap + "implements") - val doap_downloadPage = model.createProperty(doap + "download-page") + val doap_developer = model.createProperty(doap + "developer") + val doap_maintainer = model.createProperty(doap + "maintainer") + val doap_documenter = model.createProperty(doap + "documenter") + val doap_maker = model.createProperty(doap + "maker") + val doap_homePage = model.createProperty(doap + "homepage") + val doap_implements = model.createProperty(doap + "implements") + val doap_downloadPage = model.createProperty(doap + "download-page") val doap_programmingLanguage = model.createProperty(doap + "programming-language") - val doap_release = model.createProperty(doap + "release") - val doap_created = model.createProperty(doap + "created") + val doap_release = model.createProperty(doap + "release") + val doap_created = model.createProperty(doap + "created") - val dc_issued = model.createProperty(dc + "issued") - val dc_title = model.createProperty(dc + "title") + val dc_issued = model.createProperty(dc + "issued") + val dc_title = model.createProperty(dc + "title") val dc_description = model.createProperty(dc + "description") - val dc_date = model.createProperty(dc + "date") - val dc_creator = model.createProperty(dc + "creator") + val dc_date = model.createProperty(dc + "date") + val dc_creator = model.createProperty(dc + "creator") - val earlSoftware = model.createResource(earl + "Software") + val earlSoftware = model.createResource(earl + "Software") val earlTestSubject = model.createResource(earl + "TestSubject") - val earlAssertion = model.createResource(earl + "Assertion") - val earlTestResult = model.createResource(earl + "TestResult") + val earlAssertion = 
model.createResource(earl + "Assertion") + val earlTestResult = model.createResource(earl + "TestResult") val earl_automatic = model.createResource(earl + "automatic") val earl_assertedBy = model.createProperty(earl + "assertedBy") - val earl_subject = model.createProperty(earl + "subject") - val earl_test = model.createProperty(earl + "test") - val earl_result = model.createProperty(earl + "result") - val earl_mode = model.createProperty(earl + "mode") - val earl_outcome = model.createProperty(earl + "outcome") - val earl_passed = model.createProperty(earl + "passed") - val earl_failed = model.createProperty(earl + "failed") - - val labra = model.createResource("http://labra.weso.es#me") - val release = model.createResource() - val shaclex = model.createResource(shaclexURL) + val earl_subject = model.createProperty(earl + "subject") + val earl_test = model.createProperty(earl + "test") + val earl_result = model.createProperty(earl + "result") + val earl_mode = model.createProperty(earl + "mode") + val earl_outcome = model.createProperty(earl + "outcome") + val earl_passed = model.createProperty(earl + "passed") + val earl_failed = model.createProperty(earl + "failed") + + val labra = model.createResource("http://labra.weso.es#me") + val release = model.createResource() + val shaclex = model.createResource(shaclexURL) val thisReport = model.createResource("") val now = model.createTypedLiteral(sdf.format(Calendar.getInstance.getTime), XSDDatatype.XSDdateTime) @@ -132,13 +126,12 @@ case class Report( model.add(shaclex, doap_release, release) model.add(release, doap_name, "shaclex") - model.add(release, doap_created, - model.createTypedLiteral("2018-08-04", XSDDatatype.XSDdate)) + model.add(release, doap_created, model.createTypedLiteral("2018-08-04", XSDDatatype.XSDdate)) model.add(release, rdf_type, doapVersion) // Information about a test item for (r <- items) { - val t = model.createResource() + val t = model.createResource() val result = model.createResource() 
model.add(t, rdf_type, earlAssertion) model.add(t, earl_test, model.createResource(r.uriTest)) @@ -163,4 +156,3 @@ case class Report( object Report { def empty = Report(List()) } - diff --git a/modules/shacl/src/test/scala/es/weso/shacl/report/SingleTestReport.scala b/modules/shacl/src/test/scala/es/weso/shacl/report/SingleTestReport.scala index 6d1e4fd..60a7dad 100644 --- a/modules/shacl/src/test/scala/es/weso/shacl/report/SingleTestReport.scala +++ b/modules/shacl/src/test/scala/es/weso/shacl/report/SingleTestReport.scala @@ -1,17 +1,19 @@ package es.weso.shacl.report case class SingleTestReport( - passed: Boolean, // True if test passed - name: String, // Name of test - uriTest: String, // URI of test - testType: String, // Type of test - moreInfo: String // Info about what happened + passed: Boolean, // True if test passed + name: String, // Name of test + uriTest: String, // URI of test + testType: String, // Type of test + moreInfo: String // Info about what happened ) { override def toString: String = - if (passed) testType + ". OK " + name + - ", uri: " + uriTest + ". " + moreInfo - else testType + ". Failed " + name + - ", uri: " + uriTest + ". " + moreInfo + if (passed) + testType + ". OK " + name + + ", uri: " + uriTest + ". " + moreInfo + else + testType + ". Failed " + name + + ", uri: " + uriTest + ". 
" + moreInfo } diff --git a/modules/shacl/src/test/scala/es/weso/utils/IOUtils2.scala b/modules/shacl/src/test/scala/es/weso/utils/IOUtils2.scala index bdaf7e6..921b188 100644 --- a/modules/shacl/src/test/scala/es/weso/utils/IOUtils2.scala +++ b/modules/shacl/src/test/scala/es/weso/utils/IOUtils2.scala @@ -2,7 +2,7 @@ package es.weso.utils import cats.effect.IO object IOUtils2 { - def either2io[E,A](e: Either[E,A]): IO[A] = { + def either2io[E, A](e: Either[E, A]): IO[A] = { e.fold(err => IO.raiseError(new RuntimeException(err.toString)), IO.pure(_)) } -} \ No newline at end of file +} diff --git a/project/plugins.sbt b/project/plugins.sbt index 0dedec7..f7802a4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,16 +1,16 @@ -addSbtPlugin("com.github.sbt" % "sbt-github-actions" % "0.25.0") -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") -addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") -addSbtPlugin("com.github.sbt" % "sbt-git" % "2.1.0") -addSbtPlugin("com.github.sbt" % "sbt-site-asciidoctor" % "1.5.0") -//addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.11.1") -addSbtPlugin("org.lyranthe.sbt" % "partial-unification" % "1.1.2") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.24") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.1") - -// addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.2") - -addCompilerPlugin("com.olegpy" %% "better-monadic-for" % "0.3.1") +addSbtPlugin("com.github.sbt" % "sbt-github-actions" % "0.25.0") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") +addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") +addSbtPlugin("com.github.sbt" % "sbt-git" % "2.1.0") +addSbtPlugin("com.github.sbt" 
% "sbt-site-asciidoctor" % "1.5.0") +//addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") +addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.11.1") +addSbtPlugin("org.lyranthe.sbt" % "partial-unification" % "1.1.2") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.24") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.1") + +// addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.2") + +addCompilerPlugin("com.olegpy" %% "better-monadic-for" % "0.3.1") diff --git a/src/main/scala/es/weso/shacls/MainOpts.scala b/src/main/scala/es/weso/shacls/MainOpts.scala index e69de29..8b13789 100644 --- a/src/main/scala/es/weso/shacls/MainOpts.scala +++ b/src/main/scala/es/weso/shacls/MainOpts.scala @@ -0,0 +1 @@ +