/** Copyright 2014 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller

import org.apache.spark.SparkContext

/** Mix in and implement this trait if your model cannot be persisted by
* PredictionIO automatically. A companion object extending
* IPersistentModelLoader is required for PredictionIO to load the persisted
* model automatically during deployment.
*
* {{{
  * class MyModel extends IPersistentModel[MyParams] {
  *   def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
  *     ...
  *   }
  * }
  *
  * object MyModel extends IPersistentModelLoader[MyParams, MyModel] {
  *   def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
  *     ...
  *   }
  * }
* }}}
*
  * In Java, all you need to do is implement this interface and add a static
  * method that takes 3 arguments of type String, [[Params]], and SparkContext.
*
* {{{
  * public class MyModel implements IPersistentModel<MyParams>, Serializable {
  *   ...
  *   public boolean save(String id, MyParams params, SparkContext sc) {
  *     ...
  *   }
  *
  *   public static MyModel load(String id, Params params, SparkContext sc) {
  *     ...
  *   }
  *   ...
  * }
* }}}
*
* @tparam AP Algorithm parameters class.
* @see [[IPersistentModelLoader]]
* @group Algorithm
*/
trait IPersistentModel[AP <: Params] {
/** Save the model to some persistent storage.
*
    * This method should return true if the model has been saved successfully,
    * so that PredictionIO knows it can be restored later during deployment.
    * It should return false if the model cannot be saved (or should not be
    * saved due to configuration), so that PredictionIO will re-train the
    * model during deployment. All arguments of this method are provided
    * automatically by PredictionIO.
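    *
    * As an illustration only, a model that serializes itself to the local
    * filesystem might implement this method as follows (a minimal sketch;
    * `MyParams` and the `/tmp` output path are hypothetical):
    *
    * {{{
    * class MyModel(val weights: Map[String, Double])
    *   extends IPersistentModel[MyParams] with Serializable {
    *   def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
    *     // Hypothetical location; use any storage reachable at deploy time.
    *     val oos = new java.io.ObjectOutputStream(
    *       new java.io.FileOutputStream(s"/tmp/$id.model"))
    *     try oos.writeObject(this) finally oos.close()
    *     true  // the model can be restored later during deployment
    *   }
    * }
    * }}}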
*
* @param id ID of the run that trained this model.
* @param params Algorithm parameters that were used to train this model.
* @param sc An Apache Spark context.
*/
def save(id: String, params: AP, sc: SparkContext): Boolean
}

/** Implement an object that extends this trait for PredictionIO to support
* loading a persisted model during serving deployment.
*
* @tparam AP Algorithm parameters class.
* @tparam M Model class.
* @see [[IPersistentModel]]
* @group Algorithm
*/
trait IPersistentModelLoader[AP <: Params, M] {
/** Implement this method to restore a persisted model that extends the
* [[IPersistentModel]] trait. All arguments of this method are provided
* automatically by PredictionIO.
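    *
    * As an illustration, a loader matching the hypothetical `MyModel.save`
    * sketch above might read the serialized file back like this (the `/tmp`
    * path and `MyParams` are assumptions, not part of the API):
    *
    * {{{
    * object MyModel extends IPersistentModelLoader[MyParams, MyModel] {
    *   def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
    *     // Read back whatever save() wrote; sc is unused in this sketch.
    *     val ois = new java.io.ObjectInputStream(
    *       new java.io.FileInputStream(s"/tmp/$id.model"))
    *     try ois.readObject().asInstanceOf[MyModel] finally ois.close()
    *   }
    * }
    * }}}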
*
* @param id ID of the run that trained this model.
* @param params Algorithm parameters that were used to train this model.
* @param sc An optional Apache Spark context. This will be injected if the
* model was generated by a [[PAlgorithm]].
*/
def apply(id: String, params: AP, sc: Option[SparkContext]): M
}

/** Mix in this trait if your model cannot be persisted to PredictionIO's
  * metadata store for any reason and you want it persisted to the local
  * filesystem instead. This trait and [[IFSPersistentModelLoader]] contain
  * concrete implementations and need not be implemented by you.
*
* {{{
  * class MyModel extends IFSPersistentModel[MyParams] {
  *   ...
  * }
  *
  * object MyModel extends IFSPersistentModelLoader[MyParams, MyModel] {
  *   ...
  * }
* }}}
*
* @tparam AP Algorithm parameters class.
* @see [[IFSPersistentModelLoader]]
* @group Algorithm
*/
trait IFSPersistentModel[AP <: Params] extends IPersistentModel[AP] {
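  /** Saves the model to the local filesystem via Utils.save and reports
    * success.
    */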
def save(id: String, params: AP, sc: SparkContext) = {
Utils.save(id, this)
true
}
}

/** Implement an object that extends this trait for PredictionIO to support
* loading a persisted model from local filesystem during serving deployment.
*
* @tparam AP Algorithm parameters class.
* @tparam M Model class.
* @see [[IFSPersistentModel]]
* @group Algorithm
*/
trait IFSPersistentModelLoader[AP <: Params, M]
extends IPersistentModelLoader[AP, M] {
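  /** Loads a model previously saved to the local filesystem via Utils.load.
    */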
def apply(id: String, params: AP, sc: Option[SparkContext]): M = {
Utils.load(id).asInstanceOf[M]
}
}