This project has been retired. For details, please refer to its Apache Attic page.
Source code
001/*
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *   http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing,
013 * software distributed under the License is distributed on an
014 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
015 * KIND, either express or implied.  See the License for the
016 * specific language governing permissions and limitations
017 * under the License.
018 */
019package org.apache.reef.examples.group.bgd.loss;
020
021import org.apache.reef.tang.annotations.DefaultImplementation;
022
023/**
024 * Interface for Loss Functions.
025 */
026@DefaultImplementation(SquaredErrorLossFunction.class)
027public interface LossFunction {
028
029  /**
030   * Computes the loss incurred by predicting f, if y is the true label.
031   *
032   * @param y the label
033   * @param f the prediction
034   * @return the loss incurred by predicting f, if y is the true label.
035   */
036  double computeLoss(final double y, final double f);
037
038  /**
039   * Computes the gradient with respect to f, if y is the true label.
040   *
041   * @param y the label
042   * @param f the prediction
043   * @return the gradient with respect to f
044   */
045  double computeGradient(final double y, final double f);
046}