/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.constraint;

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hbase.client.Put;

/**
 * Apply a {@link Constraint} (in traditional database terminology) to an HTable.
 * Any number of {@link Constraint Constraints} can be added to the table, in
 * any order.
 * <p>
 * A {@link Constraint} must be added to a table before the table is loaded via
 * {@link Constraints#add(org.apache.hadoop.hbase.HTableDescriptor, Class[])} or
 * {@link Constraints#add(org.apache.hadoop.hbase.HTableDescriptor,
 * org.apache.hadoop.hbase.util.Pair...)}
 * (if you want to add a configuration with the {@link Constraint}). Constraints
 * will be run in the order in which they are added. Further, a Constraint will
 * be configured before it is run (on load).
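 * <p>
 * For example, a minimal sketch of wiring a constraint into a table descriptor
 * ({@code MyConstraint} is a hypothetical implementation of this interface):
 * <pre>
 * HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("demo_table"));
 * // constraints run in the order they are added
 * Constraints.add(desc, MyConstraint.class);
 * </pre>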
 * <p>
 * See {@link Constraints#enableConstraint(org.apache.hadoop.hbase.HTableDescriptor, Class)} and
 * {@link Constraints#disableConstraint(org.apache.hadoop.hbase.HTableDescriptor, Class)} for
 * enabling/disabling of a given {@link Constraint} after it has been added.
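 * <p>
 * For instance, a sketch of toggling the same hypothetical {@code MyConstraint},
 * reusing the {@code desc} descriptor from the example above:
 * <pre>
 * // temporarily skip checks from MyConstraint without removing it from the table
 * Constraints.disableConstraint(desc, MyConstraint.class);
 * // and re-enable it later
 * Constraints.enableConstraint(desc, MyConstraint.class);
 * </pre>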
 * <p>
 * If a {@link Put} is invalid, the Constraint should throw a
 * {@link org.apache.hadoop.hbase.constraint.ConstraintException}, indicating
 * that the {@link Put} has failed. When this exception is thrown, no further
 * retries of the {@link Put} are attempted, nor are any other
 * {@link Constraint Constraints} run against it (the {@link Put} is clearly not
 * valid). Therefore, there are performance implications in the order in which
 * {@link Constraint Constraints} are specified.
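 * <p>
 * A minimal implementation sketch (the class name and the rejection rule are
 * purely illustrative):
 * <pre>
 * public class NonEmptyConstraint extends BaseConstraint {
 *   public void check(Put p) throws ConstraintException {
 *     // reject Puts that carry no columns at all
 *     if (p.isEmpty()) {
 *       throw new ConstraintException("Rejecting empty Put: " + p);
 *     }
 *   }
 * }
 * </pre>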
 * <p>
 * If a {@link Constraint} throws a {@link RuntimeException} instead of
 * rejecting the {@link Put} with a
 * {@link org.apache.hadoop.hbase.constraint.ConstraintException}, the entire
 * constraint processing mechanism ({@link ConstraintProcessor}) will be
 * unloaded from the table. This ensures that the region server remains
 * functional, but no more {@link Put Puts} will be checked via
 * {@link Constraint Constraints}.
 * <p>
 * Further, {@link Constraint Constraints} should probably not be used to
 * enforce cross-table references, as doing so will cause tremendous write
 * slowdowns, though it is possible.
 * <p>
 * NOTE: Implementing classes must have a nullary (no-args) constructor.
 *
 * @see BaseConstraint
 * @see Constraints
 */
@InterfaceAudience.Private
public interface Constraint extends Configurable {

  /**
   * Check a {@link Put} to ensure it is valid for the table. If the {@link Put}
   * is valid, just return from the method. Otherwise, throw a
   * {@link org.apache.hadoop.hbase.constraint.ConstraintException} describing
   * what went wrong. That exception is propagated back to the client, so the
   * caller can see what caused the {@link Put} to fail.
   * @param p {@link Put} to check
   * @throws org.apache.hadoop.hbase.constraint.ConstraintException when the
   *         {@link Put} does not match the constraint.
   */
  void check(Put p) throws ConstraintException;

}