/**
 * Copyright 2015 DuraSpace, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fcrepo.kernel.impl.rdf.impl.mappings;

import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import org.fcrepo.kernel.utils.iterators.RdfStream;
import org.slf4j.Logger;

import javax.jcr.RepositoryException;
import javax.jcr.nodetype.NodeDefinition;
import javax.jcr.nodetype.NodeType;
import java.util.Iterator;

import static com.google.common.base.Throwables.propagate;
import static com.hp.hpl.jena.graph.Triple.create;
import static com.hp.hpl.jena.vocabulary.RDFS.range;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Utility for moving Node Definitions into RDFS triples
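 * <p>
 * A minimal usage sketch (the domain URI and the {@code nodeType} variable below are
 * illustrative only; callers normally obtain both from the repository's node type
 * registry and its RDF mapping, and {@code NodeFactory} is Jena's node factory):
 * <pre>{@code
 * Node domain = NodeFactory.createURI("http://example.com/ns#");
 * NodeDefinition childDefinition = nodeType.getChildNodeDefinitions()[0];
 * Iterator<Triple> triples = new NodeDefinitionToTriples(domain).apply(childDefinition);
 * }</pre>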
 * @author cbeer
 */
public class NodeDefinitionToTriples extends ItemDefinitionToTriples<NodeDefinition> {

    private static final Logger LOGGER = getLogger(NodeDefinitionToTriples.class);

    /**
     * Translate node definitions into triples. The definitions will hang off
     * the provided RDF Node.
     * @param domain the RDF node that the generated definition triples will hang off of
     */
    public NodeDefinitionToTriples(final Node domain) {
        super(domain);
    }

    @Override
    public Iterator<Triple> apply(final NodeDefinition input) {

        try {

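            // RDF subject that stands for this child-node definition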
            final Node definitionNode = getResource(input).asNode();

            final NodeType[] requiredPrimaryTypes = input.getRequiredPrimaryTypes();

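            // A single required primary type maps cleanly onto rdfs:range; zero or
            // multiple required types are skipped (see the TODO below for unionOf).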
            if (requiredPrimaryTypes.length > 1) {
                // TODO we can express this as an OWL unionOf. But should we?
                LOGGER.trace(
                        "Skipping RDFS:range for {} with multiple primary types",
                        input.getName());
            } else if (requiredPrimaryTypes.length == 1) {
                LOGGER.trace("Adding RDFS:range for {} with primary type {}",
                             input.getName(),
                             requiredPrimaryTypes[0].getName());
                return new RdfStream(create(definitionNode,
                        range.asNode(),
                        getResource(requiredPrimaryTypes[0]).asNode()))
                        .concat(super.apply(input));
            } else {
                LOGGER.trace("Skipping RDFS:range for {} with no required primary types",
                             input.getName());
            }
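            // With zero or multiple required primary types, emit only the generic
            // ItemDefinition triples.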
            return super.apply(input);

        } catch (final RepositoryException e) {
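            // rethrow as an unchecked exception, since apply() cannot declare RepositoryException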
            throw propagate(e);
        }

    }
}