blob: a7df29948c5478b734514e637c2776e786bc37e2 [file] [log] [blame]
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<title>MADlib: logistic.sql_in Source File</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
</script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { searchBox.OnSelectItem(0); });
</script>
<script src="../mathjax/MathJax.js">
MathJax.Hub.Config({
extensions: ["tex2jax.js", "TeX/AMSmath.js", "TeX/AMSsymbols.js"],
jax: ["input/TeX","output/HTML-CSS"],
});
</script>
</head>
<body>
<div id="top"><!-- do not remove this div! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">MADlib
&#160;<span id="projectnumber">0.6</span> <span style="font-size:10pt; font-style:italic"><a href="../latest/./logistic_8sql__in_source.html"> A newer version is available</a></span>
</div>
<div id="projectbrief">User Documentation</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- Generated by Doxygen 1.7.5.1 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="dynsections.js"></script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main&#160;Page</span></a></li>
<li><a href="modules.html"><span>Modules</span></a></li>
<li class="current"><a href="files.html"><span>Files</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="files.html"><span>File&#160;List</span></a></li>
<li><a href="globals.html"><span>File&#160;Members</span></a></li>
</ul>
</div>
</div>
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
initNavTree('logistic_8sql__in.html','');
</script>
<div id="doc-content">
<div class="header">
<div class="headertitle">
<div class="title">logistic.sql_in</div> </div>
</div>
<div class="contents">
<a href="logistic_8sql__in.html">Go to the documentation of this file.</a><div class="fragment"><pre class="fragment"><a name="l00001"></a>00001 <span class="comment">/* ----------------------------------------------------------------------- */</span><span class="comment">/**</span>
<a name="l00002"></a>00002 <span class="comment"> *</span>
<a name="l00003"></a>00003 <span class="comment"> * @file logistic.sql_in</span>
<a name="l00004"></a>00004 <span class="comment"> *</span>
<a name="l00005"></a>00005 <span class="comment"> * @brief SQL functions for logistic regression</span>
<a name="l00006"></a>00006 <span class="comment"> * @date January 2011</span>
<a name="l00007"></a>00007 <span class="comment"> *</span>
<a name="l00008"></a>00008 <span class="comment"> * @sa For a brief introduction to logistic regression, see the</span>
<a name="l00009"></a>00009 <span class="comment"> * module description \ref grp_logreg.</span>
<a name="l00010"></a>00010 <span class="comment"> *</span>
<a name="l00011"></a>00011 <span class="comment"> */</span><span class="comment">/* ----------------------------------------------------------------------- */</span>
<a name="l00012"></a>00012
<a name="l00013"></a>00013 m4_include(`SQLCommon.m4<span class="stringliteral">&#39;) --&#39;</span>
<a name="l00014"></a>00014 <span class="comment"></span>
<a name="l00015"></a>00015 <span class="comment">/**</span>
<a name="l00016"></a>00016 <span class="comment">@addtogroup grp_logreg</span>
<a name="l00017"></a>00017 <span class="comment"></span>
<a name="l00018"></a>00018 <span class="comment">@about</span>
<a name="l00019"></a>00019 <span class="comment"></span>
<a name="l00020"></a>00020 <span class="comment">(Binomial) Logistic regression refers to a stochastic model in which the</span>
<a name="l00021"></a>00021 <span class="comment">conditional mean of the dependent dichotomous variable (usually denoted</span>
<a name="l00022"></a>00022 <span class="comment">\f$ Y \in \{ 0,1 \} \f$) is the logistic function of an affine function of the</span>
<a name="l00023"></a>00023 <span class="comment">vector of independent variables (usually denoted \f$ \boldsymbol x \f$). That</span>
<a name="l00024"></a>00024 <span class="comment">is,</span>
<a name="l00025"></a>00025 <span class="comment">\f[</span>
<a name="l00026"></a>00026 <span class="comment"> E[Y \mid \boldsymbol x] = \sigma(\boldsymbol c^T \boldsymbol x)</span>
<a name="l00027"></a>00027 <span class="comment">\f]</span>
<a name="l00028"></a>00028 <span class="comment">for some unknown vector of coefficients \f$ \boldsymbol c \f$ and where</span>
<a name="l00029"></a>00029 <span class="comment">\f$ \sigma(x) = \frac{1}{1 + \exp(-x)} \f$ is the logistic function. Logistic</span>
<a name="l00030"></a>00030 <span class="comment">regression finds the vector of coefficients \f$ \boldsymbol c \f$ that maximizes</span>
<a name="l00031"></a>00031 <span class="comment">the likelihood of the observations.</span>
<a name="l00032"></a>00032 <span class="comment"></span>
<a name="l00033"></a>00033 <span class="comment">Let</span>
<a name="l00034"></a>00034 <span class="comment">- \f$ \boldsymbol y \in \{ 0,1 \}^n \f$ denote the vector of observed dependent</span>
<a name="l00035"></a>00035 <span class="comment"> variables, with \f$ n \f$ rows, containing the observed values of the</span>
<a name="l00036"></a>00036 <span class="comment"> dependent variable,</span>
<a name="l00037"></a>00037 <span class="comment">- \f$ X \in \mathbf R^{n \times k} \f$ denote the design matrix with \f$ k \f$</span>
<a name="l00038"></a>00038 <span class="comment"> columns and \f$ n \f$ rows, containing all observed vectors of independent</span>
<a name="l00039"></a>00039 <span class="comment"> variables \f$ \boldsymbol x_i \f$ as rows.</span>
<a name="l00040"></a>00040 <span class="comment"></span>
<a name="l00041"></a>00041 <span class="comment">By definition,</span>
<a name="l00042"></a>00042 <span class="comment">\f[</span>
<a name="l00043"></a>00043 <span class="comment"> P[Y = y_i | \boldsymbol x_i]</span>
<a name="l00044"></a>00044 <span class="comment"> = \sigma((-1)^{y_i} \cdot \boldsymbol c^T \boldsymbol x_i)</span>
<a name="l00045"></a>00045 <span class="comment"> \,.</span>
<a name="l00046"></a>00046 <span class="comment">\f]</span>
<a name="l00047"></a>00047 <span class="comment">Maximizing the likelihood</span>
<a name="l00048"></a>00048 <span class="comment">\f$ \prod_{i=1}^n \Pr(Y = y_i \mid \boldsymbol x_i) \f$</span>
<a name="l00049"></a>00049 <span class="comment">is equivalent to maximizing the log-likelihood</span>
<a name="l00050"></a>00050 <span class="comment">\f$ \sum_{i=1}^n \log \Pr(Y = y_i \mid \boldsymbol x_i) \f$, which simplifies to</span>
<a name="l00051"></a>00051 <span class="comment">\f[</span>
<a name="l00052"></a>00052 <span class="comment"> l(\boldsymbol c) =</span>
<a name="l00053"></a>00053 <span class="comment"> -\sum_{i=1}^n \log(1 + \exp((-1)^{y_i}</span>
<a name="l00054"></a>00054 <span class="comment"> \cdot \boldsymbol c^T \boldsymbol x_i))</span>
<a name="l00055"></a>00055 <span class="comment"> \,.</span>
<a name="l00056"></a>00056 <span class="comment">\f]</span>
<a name="l00057"></a>00057 <span class="comment">The Hessian of this objective is \f$ H = -X^T A X \f$ where</span>
<a name="l00058"></a>00058 <span class="comment">\f$ A = \text{diag}(a_1, \dots, a_n) \f$ is the diagonal matrix with</span>
<a name="l00059"></a>00059 <span class="comment">\f$</span>
<a name="l00060"></a>00060 <span class="comment">    a_i = \sigma(\boldsymbol c^T \boldsymbol x_i)</span>
<a name="l00061"></a>00061 <span class="comment">          \cdot</span>
<a name="l00062"></a>00062 <span class="comment">          \sigma(-\boldsymbol c^T \boldsymbol x_i)</span>
<a name="l00063"></a>00063 <span class="comment"> \,.</span>
<a name="l00064"></a>00064 <span class="comment">\f$</span>
<a name="l00065"></a>00065 <span class="comment">Since \f$ H \f$ is non-positive definite, \f$ l(\boldsymbol c) \f$ is concave.</span>
<a name="l00066"></a>00066 <span class="comment">There are many techniques for solving convex optimization problems. Currently,</span>
<a name="l00067"></a>00067 <span class="comment">logistic regression in MADlib can use one of three algorithms:</span>
<a name="l00068"></a>00068 <span class="comment">- Iteratively Reweighted Least Squares</span>
<a name="l00069"></a>00069 <span class="comment">- A conjugate-gradient approach, also known as Fletcher-Reeves method in the</span>
<a name="l00070"></a>00070 <span class="comment"> literature, where we use the Hestenes-Stiefel rule for calculating the step</span>
<a name="l00071"></a>00071 <span class="comment"> size.</span>
<a name="l00072"></a>00072 <span class="comment">- Incremental gradient descent, also known as incremental gradient methods or</span>
<a name="l00073"></a>00073 <span class="comment"> stochastic gradient descent in the literature.</span>
<a name="l00074"></a>00074 <span class="comment"></span>
<a name="l00075"></a>00075 <span class="comment">We estimate the standard error for coefficient \f$ i \f$ as</span>
<a name="l00076"></a>00076 <span class="comment">\f[</span>
<a name="l00077"></a>00077 <span class="comment">    \mathit{se}(c_i) = \sqrt{ \left( (X^T A X)^{-1} \right)_{ii} }</span>
<a name="l00078"></a>00078 <span class="comment"> \,.</span>
<a name="l00079"></a>00079 <span class="comment">\f]</span>
<a name="l00080"></a>00080 <span class="comment">The Wald z-statistic is</span>
<a name="l00081"></a>00081 <span class="comment">\f[</span>
<a name="l00082"></a>00082 <span class="comment"> z_i = \frac{c_i}{\mathit{se}(c_i)}</span>
<a name="l00083"></a>00083 <span class="comment"> \,.</span>
<a name="l00084"></a>00084 <span class="comment">\f]</span>
<a name="l00085"></a>00085 <span class="comment"></span>
<a name="l00086"></a>00086 <span class="comment">The Wald \f$ p \f$-value for coefficient \f$ i \f$ gives the probability (under</span>
<a name="l00087"></a>00087 <span class="comment">the assumptions inherent in the Wald test) of seeing a value at least as extreme</span>
<a name="l00088"></a>00088 <span class="comment">as the one observed, provided that the null hypothesis (\f$ c_i = 0 \f$) is</span>
<a name="l00089"></a>00089 <span class="comment">true. Letting \f$ F \f$ denote the cumulative distribution function of a standard</span>
<a name="l00090"></a>00090 <span class="comment">normal distribution, the Wald \f$ p \f$-value for coefficient \f$ i \f$ is</span>
<a name="l00091"></a>00091 <span class="comment">therefore</span>
<a name="l00092"></a>00092 <span class="comment">\f[</span>
<a name="l00093"></a>00093 <span class="comment"> p_i = \Pr(|Z| \geq |z_i|) = 2 \cdot (1 - F( |z_i| ))</span>
<a name="l00094"></a>00094 <span class="comment">\f]</span>
<a name="l00095"></a>00095 <span class="comment">where \f$ Z \f$ is a standard normally distributed random variable.</span>
<a name="l00096"></a>00096 <span class="comment"></span>
<a name="l00097"></a>00097 <span class="comment">The odds ratio for coefficient \f$ i \f$ is estimated as \f$ \exp(c_i) \f$.</span>
<a name="l00098"></a>00098 <span class="comment"></span>
<a name="l00099"></a>00099 <span class="comment">The condition number is computed as \f$ \kappa(X^T A X) \f$ during the iteration</span>
<a name="l00100"></a>00100 <span class="comment">immediately &lt;em&gt;preceding&lt;/em&gt; convergence (i.e., \f$ A \f$ is computed using</span>
<a name="l00101"></a>00101 <span class="comment">the coefficients of the previous iteration). A large condition number (say, more</span>
<a name="l00102"></a>00102 <span class="comment">than 1000) indicates the presence of significant multicollinearity.</span>
<a name="l00103"></a>00103 <span class="comment"></span>
<a name="l00104"></a>00104 <span class="comment"></span>
<a name="l00105"></a>00105 <span class="comment">@input</span>
<a name="l00106"></a>00106 <span class="comment"></span>
<a name="l00107"></a>00107 <span class="comment">The training data is expected to be of the following form:\n</span>
<a name="l00108"></a>00108 <span class="comment">&lt;pre&gt;{TABLE|VIEW} &lt;em&gt;sourceName&lt;/em&gt; (</span>
<a name="l00109"></a>00109 <span class="comment"> ...</span>
<a name="l00110"></a>00110 <span class="comment"> &lt;em&gt;dependentVariable&lt;/em&gt; BOOLEAN,</span>
<a name="l00111"></a>00111 <span class="comment"> &lt;em&gt;independentVariables&lt;/em&gt; FLOAT8[],</span>
<a name="l00112"></a>00112 <span class="comment"> ...</span>
<a name="l00113"></a>00113 <span class="comment">)&lt;/pre&gt;</span>
<a name="l00114"></a>00114 <span class="comment"></span>
<a name="l00115"></a>00115 <span class="comment">@usage</span>
<a name="l00116"></a>00116 <span class="comment">- Get vector of coefficients \f$ \boldsymbol c \f$ and all diagnostic</span>
<a name="l00117"></a>00117 <span class="comment"> statistics:\n</span>
<a name="l00118"></a>00118 <span class="comment"> &lt;pre&gt;SELECT \ref logregr_train(</span>
<a name="l00119"></a>00119 <span class="comment"> &#39;&lt;em&gt;sourceName&lt;/em&gt;&#39;, &#39;&lt;em&gt;outName&lt;/em&gt;&#39;, &#39;&lt;em&gt;dependentVariable&lt;/em&gt;&#39;,</span>
<a name="l00120"></a>00120 <span class="comment"> &#39;&lt;em&gt;independentVariables&lt;/em&gt;&#39;[, &#39;&lt;em&gt;grouping_columns&lt;/em&gt;&#39;,</span>
<a name="l00121"></a>00121 <span class="comment"> [, &lt;em&gt;numberOfIterations&lt;/em&gt; [, &#39;&lt;em&gt;optimizer&lt;/em&gt;&#39; [, &lt;em&gt;precision&lt;/em&gt;</span>
<a name="l00122"></a>00122 <span class="comment"> [, &lt;em&gt;verbose&lt;/em&gt; ]] ] ] ]</span>
<a name="l00123"></a>00123 <span class="comment">);&lt;/pre&gt;</span>
<a name="l00124"></a>00124 <span class="comment"> Output table:</span>
<a name="l00125"></a>00125 <span class="comment"> &lt;pre&gt;coef | log_likelihood | std_err | z_stats | p_values | odds_ratios | condition_no | num_iterations</span>
<a name="l00126"></a>00126 <span class="comment">-----+----------------+---------+---------+----------+-------------+--------------+---------------</span>
<a name="l00127"></a>00127 <span class="comment"> ...</span>
<a name="l00128"></a>00128 <span class="comment">&lt;/pre&gt;</span>
<a name="l00129"></a>00129 <span class="comment">- Get vector of coefficients \f$ \boldsymbol c \f$:\n</span>
<a name="l00130"></a>00130 <span class="comment"> &lt;pre&gt;SELECT coef from outName; &lt;/pre&gt;</span>
<a name="l00131"></a>00131 <span class="comment">- Get a subset of the output columns, e.g., only the array of coefficients</span>
<a name="l00132"></a>00132 <span class="comment">  \f$ \boldsymbol c \f$, the log-likelihood</span>
<a name="l00133"></a>00133 <span class="comment"> \f$ l(\boldsymbol c) \f$, and the array of p-values \f$ \boldsymbol p \f$:</span>
<a name="l00134"></a>00134 <span class="comment"> &lt;pre&gt;SELECT coef, log_likelihood, p_values FROM outName; &lt;/pre&gt;</span>
<a name="l00135"></a>00135 <span class="comment">- By default, the option &lt;em&gt;verbose&lt;/em&gt; is False. If it is set to be True, warning messages</span>
<a name="l00136"></a>00136 <span class="comment"> will be output to the SQL client for groups that failed.</span>
<a name="l00137"></a>00137 <span class="comment"></span>
<a name="l00138"></a>00138 <span class="comment">@examp</span>
<a name="l00139"></a>00139 <span class="comment"></span>
<a name="l00140"></a>00140 <span class="comment">-# Create the sample data set:</span>
<a name="l00141"></a>00141 <span class="comment">@verbatim</span>
<a name="l00142"></a>00142 <span class="comment">sql&gt; SELECT * FROM data;</span>
<a name="l00143"></a>00143 <span class="comment"> r1 | val</span>
<a name="l00144"></a>00144 <span class="comment">---------------------------------------------+-----</span>
<a name="l00145"></a>00145 <span class="comment"> {1,3.01789340097457,0.454183579888195} | t</span>
<a name="l00146"></a>00146 <span class="comment"> {1,-2.59380532894284,0.602678326424211} | f</span>
<a name="l00147"></a>00147 <span class="comment"> {1,-1.30643094424158,0.151587064377964} | t</span>
<a name="l00148"></a>00148 <span class="comment"> {1,3.60722299199551,0.963550757616758} | t</span>
<a name="l00149"></a>00149 <span class="comment"> {1,-1.52197745628655,0.0782248834148049} | t</span>
<a name="l00150"></a>00150 <span class="comment"> {1,-4.8746574902907,0.345104880165309} | f</span>
<a name="l00151"></a>00151 <span class="comment">...</span>
<a name="l00152"></a>00152 <span class="comment">@endverbatim</span>
<a name="l00153"></a>00153 <span class="comment">-# Run the logistic regression function:</span>
<a name="l00154"></a>00154 <span class="comment">@verbatim</span>
<a name="l00155"></a>00155 <span class="comment">sql&gt; \x on</span>
<a name="l00156"></a>00156 <span class="comment">Expanded display is on.</span>
<a name="l00157"></a>00157 <span class="comment">sql&gt; SELECT logregr_train(&#39;data&#39;, &#39;out_tbl&#39;, &#39;val&#39;, &#39;r1&#39;, Null, 100, &#39;irls&#39;, 0.001);</span>
<a name="l00158"></a>00158 <span class="comment">sql&gt; SELECT * from out_tbl;</span>
<a name="l00159"></a>00159 <span class="comment">coef | {5.59049410898112,2.11077546770772,-0.237276684606453}</span>
<a name="l00160"></a>00160 <span class="comment">log_likelihood | -467.214718489873</span>
<a name="l00161"></a>00161 <span class="comment">std_err | {0.318943457652178,0.101518723785383,0.294509929481773}</span>
<a name="l00162"></a>00162 <span class="comment">z_stats | {17.5281667482197,20.7919819024719,-0.805666162169712}</span>
<a name="l00163"></a>00163 <span class="comment">p_values | {8.73403463417837e-69,5.11539430631541e-96,0.420435365338518}</span>
<a name="l00164"></a>00164 <span class="comment">odds_ratios | {267.867942976278,8.2546400100702,0.788773016471171}</span>
<a name="l00165"></a>00165 <span class="comment">condition_no | 179.186118573205</span>
<a name="l00166"></a>00166 <span class="comment">num_iterations | 9</span>
<a name="l00167"></a>00167 <span class="comment"></span>
<a name="l00168"></a>00168 <span class="comment">@endverbatim</span>
<a name="l00169"></a>00169 <span class="comment"></span>
<a name="l00170"></a>00170 <span class="comment">@literature</span>
<a name="l00171"></a>00171 <span class="comment"></span>
<a name="l00172"></a>00172 <span class="comment">A somewhat random selection of nice write-ups, with valuable pointers into</span>
<a name="l00173"></a>00173 <span class="comment">further literature:</span>
<a name="l00174"></a>00174 <span class="comment"></span>
<a name="l00175"></a>00175 <span class="comment">[1] Cosma Shalizi: Statistics 36-350: Data Mining, Lecture Notes, 18 November</span>
<a name="l00176"></a>00176 <span class="comment"> 2009, http://www.stat.cmu.edu/~cshalizi/350/lectures/26/lecture-26.pdf</span>
<a name="l00177"></a>00177 <span class="comment"></span>
<a name="l00178"></a>00178 <span class="comment">[2] Thomas P. Minka: A comparison of numerical optimizers for logistic</span>
<a name="l00179"></a>00179 <span class="comment"> regression, 2003 (revised Mar 26, 2007),</span>
<a name="l00180"></a>00180 <span class="comment"> http://research.microsoft.com/en-us/um/people/minka/papers/logreg/minka-logreg.pdf</span>
<a name="l00181"></a>00181 <span class="comment"></span>
<a name="l00182"></a>00182 <span class="comment">[3] Paul Komarek, Andrew W. Moore: Making Logistic Regression A Core Data Mining</span>
<a name="l00183"></a>00183 <span class="comment"> Tool With TR-IRLS, IEEE International Conference on Data Mining 2005,</span>
<a name="l00184"></a>00184 <span class="comment"> pp. 685-688, http://komarix.org/ac/papers/tr-irls.short.pdf</span>
<a name="l00185"></a>00185 <span class="comment"></span>
<a name="l00186"></a>00186 <span class="comment">[4] D. P. Bertsekas: Incremental gradient, subgradient, and proximal methods for</span>
<a name="l00187"></a>00187 <span class="comment"> convex optimization: a survey, Technical report, Laboratory for Information</span>
<a name="l00188"></a>00188 <span class="comment"> and Decision Systems, 2010,</span>
<a name="l00189"></a>00189 <span class="comment"> http://web.mit.edu/dimitrib/www/Incremental_Survey_LIDS.pdf</span>
<a name="l00190"></a>00190 <span class="comment"></span>
<a name="l00191"></a>00191 <span class="comment">[5] A. Nemirovski, A. Juditsky, G. Lan, and A. Shapiro: Robust stochastic</span>
<a name="l00192"></a>00192 <span class="comment"> approximation approach to stochastic programming, SIAM Journal on</span>
<a name="l00193"></a>00193 <span class="comment"> Optimization, 19(4), 2009, http://www2.isye.gatech.edu/~nemirovs/SIOPT_RSA_2009.pdf</span>
<a name="l00194"></a>00194 <span class="comment"></span>
<a name="l00195"></a>00195 <span class="comment">@sa File logistic.sql_in (documenting the SQL functions)</span>
<a name="l00196"></a>00196 <span class="comment"></span>
<a name="l00197"></a>00197 <span class="comment">@internal</span>
<a name="l00198"></a>00198 <span class="comment">@sa Namespace logistic (documenting the driver/outer loop implemented in</span>
<a name="l00199"></a>00199 <span class="comment"> Python), Namespace</span>
<a name="l00200"></a>00200 <span class="comment"> \ref madlib::modules::regress documenting the implementation in C++</span>
<a name="l00201"></a>00201 <span class="comment">@endinternal</span>
<a name="l00202"></a>00202 <span class="comment"></span>
<a name="l00203"></a>00203 <span class="comment">*/</span>
<a name="l00204"></a>00204
<a name="l00205"></a>00205 DROP TYPE IF EXISTS MADLIB_SCHEMA.__logregr_result;
<a name="l00206"></a>00206 CREATE TYPE MADLIB_SCHEMA.__logregr_result AS (
<a name="l00207"></a>00207 coef DOUBLE PRECISION[],
<a name="l00208"></a>00208 log_likelihood DOUBLE PRECISION,
<a name="l00209"></a>00209 std_err DOUBLE PRECISION[],
<a name="l00210"></a>00210 z_stats DOUBLE PRECISION[],
<a name="l00211"></a>00211 p_values DOUBLE PRECISION[],
<a name="l00212"></a>00212 odds_ratios DOUBLE PRECISION[],
<a name="l00213"></a>00213 condition_no DOUBLE PRECISION,
<a name="l00214"></a>00214 status INTEGER,
<a name="l00215"></a>00215 num_iterations INTEGER
<a name="l00216"></a>00216 );
<a name="l00217"></a>00217
<a name="l00218"></a>00218 ------------------------------------------------------------------------
<a name="l00219"></a>00219
<a name="l00220"></a>00220 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_cg_step_transition(
<a name="l00221"></a>00221 DOUBLE PRECISION[],
<a name="l00222"></a>00222 BOOLEAN,
<a name="l00223"></a>00223 DOUBLE PRECISION[],
<a name="l00224"></a>00224 DOUBLE PRECISION[])
<a name="l00225"></a>00225 RETURNS DOUBLE PRECISION[]
<a name="l00226"></a>00226 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_cg_step_transition&#39;</span>
<a name="l00227"></a>00227 LANGUAGE C IMMUTABLE;
<a name="l00228"></a>00228
<a name="l00229"></a>00229 ------------------------------------------------------------------------
<a name="l00230"></a>00230
<a name="l00231"></a>00231 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_irls_step_transition(
<a name="l00232"></a>00232 DOUBLE PRECISION[],
<a name="l00233"></a>00233 BOOLEAN,
<a name="l00234"></a>00234 DOUBLE PRECISION[],
<a name="l00235"></a>00235 DOUBLE PRECISION[])
<a name="l00236"></a>00236 RETURNS DOUBLE PRECISION[]
<a name="l00237"></a>00237 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_irls_step_transition&#39;</span>
<a name="l00238"></a>00238 LANGUAGE C IMMUTABLE;
<a name="l00239"></a>00239
<a name="l00240"></a>00240 ------------------------------------------------------------------------
<a name="l00241"></a>00241
<a name="l00242"></a>00242 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_igd_step_transition(
<a name="l00243"></a>00243 DOUBLE PRECISION[],
<a name="l00244"></a>00244 BOOLEAN,
<a name="l00245"></a>00245 DOUBLE PRECISION[],
<a name="l00246"></a>00246 DOUBLE PRECISION[])
<a name="l00247"></a>00247 RETURNS DOUBLE PRECISION[]
<a name="l00248"></a>00248 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_igd_step_transition&#39;</span>
<a name="l00249"></a>00249 LANGUAGE C IMMUTABLE;
<a name="l00250"></a>00250
<a name="l00251"></a>00251 ------------------------------------------------------------------------
<a name="l00252"></a>00252
<a name="l00253"></a>00253 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_cg_step_merge_states(
<a name="l00254"></a>00254 state1 DOUBLE PRECISION[],
<a name="l00255"></a>00255 state2 DOUBLE PRECISION[])
<a name="l00256"></a>00256 RETURNS DOUBLE PRECISION[]
<a name="l00257"></a>00257 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_cg_step_merge_states&#39;</span>
<a name="l00258"></a>00258 LANGUAGE C IMMUTABLE STRICT;
<a name="l00259"></a>00259
<a name="l00260"></a>00260 ------------------------------------------------------------------------
<a name="l00261"></a>00261
<a name="l00262"></a>00262 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_irls_step_merge_states(
<a name="l00263"></a>00263 state1 DOUBLE PRECISION[],
<a name="l00264"></a>00264 state2 DOUBLE PRECISION[])
<a name="l00265"></a>00265 RETURNS DOUBLE PRECISION[]
<a name="l00266"></a>00266 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_irls_step_merge_states&#39;</span>
<a name="l00267"></a>00267 LANGUAGE C IMMUTABLE STRICT;
<a name="l00268"></a>00268
<a name="l00269"></a>00269 ------------------------------------------------------------------------
<a name="l00270"></a>00270
<a name="l00271"></a>00271 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_igd_step_merge_states(
<a name="l00272"></a>00272 state1 DOUBLE PRECISION[],
<a name="l00273"></a>00273 state2 DOUBLE PRECISION[])
<a name="l00274"></a>00274 RETURNS DOUBLE PRECISION[]
<a name="l00275"></a>00275 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_igd_step_merge_states&#39;</span>
<a name="l00276"></a>00276 LANGUAGE C IMMUTABLE STRICT;
<a name="l00277"></a>00277
<a name="l00278"></a>00278 ------------------------------------------------------------------------
<a name="l00279"></a>00279
<a name="l00280"></a>00280 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_cg_step_final(
<a name="l00281"></a>00281 state DOUBLE PRECISION[])
<a name="l00282"></a>00282 RETURNS DOUBLE PRECISION[]
<a name="l00283"></a>00283 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_cg_step_final&#39;</span>
<a name="l00284"></a>00284 LANGUAGE C IMMUTABLE STRICT;
<a name="l00285"></a>00285
<a name="l00286"></a>00286 ------------------------------------------------------------------------
<a name="l00287"></a>00287
<a name="l00288"></a>00288 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_irls_step_final(
<a name="l00289"></a>00289 state DOUBLE PRECISION[])
<a name="l00290"></a>00290 RETURNS DOUBLE PRECISION[]
<a name="l00291"></a>00291 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_irls_step_final&#39;</span>
<a name="l00292"></a>00292 LANGUAGE C IMMUTABLE STRICT;
<a name="l00293"></a>00293
<a name="l00294"></a>00294 ------------------------------------------------------------------------
<a name="l00295"></a>00295
<a name="l00296"></a>00296 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_igd_step_final(
<a name="l00297"></a>00297 state DOUBLE PRECISION[])
<a name="l00298"></a>00298 RETURNS DOUBLE PRECISION[]
<a name="l00299"></a>00299 AS <span class="stringliteral">&#39;MODULE_PATHNAME&#39;</span>, <span class="stringliteral">&#39;logregr_igd_step_final&#39;</span>
<a name="l00300"></a>00300 LANGUAGE C IMMUTABLE STRICT;
<a name="l00301"></a>00301
<a name="l00302"></a>00302 ------------------------------------------------------------------------
<a name="l00303"></a>00303 <span class="comment"></span>
<a name="l00304"></a>00304 <span class="comment">/**</span>
<a name="l00305"></a>00305 <span class="comment"> * @internal</span>
<a name="l00306"></a>00306 <span class="comment"> * @brief Perform one iteration of the conjugate-gradient method for computing</span>
<a name="l00307"></a>00307 <span class="comment"> * logistic regression</span>
<a name="l00308"></a>00308 <span class="comment"> */</span>
<a name="l00309"></a>00309 CREATE AGGREGATE MADLIB_SCHEMA.__logregr_cg_step(
<a name="l00310"></a>00310 <span class="comment">/*+ y */</span> BOOLEAN,
<a name="l00311"></a>00311 <span class="comment">/*+ x */</span> DOUBLE PRECISION[],
<a name="l00312"></a>00312 <span class="comment">/*+ previous_state */</span> DOUBLE PRECISION[]) (
<a name="l00313"></a>00313
<a name="l00314"></a>00314 STYPE=DOUBLE PRECISION[],
<a name="l00315"></a>00315 SFUNC=MADLIB_SCHEMA.__logregr_cg_step_transition,
<a name="l00316"></a>00316 m4_ifdef(`__GREENPLUM__<span class="stringliteral">&#39;,`prefunc=MADLIB_SCHEMA.__logregr_cg_step_merge_states,&#39;</span>)
<a name="l00317"></a>00317 FINALFUNC=MADLIB_SCHEMA.__logregr_cg_step_final,
<a name="l00318"></a>00318 INITCOND=&#39;{0,0,0,0,0,0}<span class="stringliteral">&#39;</span>
<a name="l00319"></a>00319 <span class="stringliteral">);</span>
<a name="l00320"></a>00320 <span class="stringliteral"></span>
<a name="l00321"></a>00321 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00322"></a>00322 <span class="stringliteral"></span><span class="comment"></span>
<a name="l00323"></a>00323 <span class="comment">/**</span>
<a name="l00324"></a>00324 <span class="comment"> * @internal</span>
<a name="l00325"></a>00325 <span class="comment"> * @brief Perform one iteration of the iteratively-reweighted-least-squares</span>
<a name="l00326"></a>00326 <span class="comment"> *        method for computing logistic regression</span>
<a name="l00327"></a>00327 <span class="comment"> */</span>
<a name="l00328"></a>00328 CREATE AGGREGATE MADLIB_SCHEMA.__logregr_irls_step(
<a name="l00329"></a>00329 /*+ y */ BOOLEAN,
<a name="l00330"></a>00330 /*+ x */ DOUBLE PRECISION[],
<a name="l00331"></a>00331 /*+ previous_state */ DOUBLE PRECISION[]) (
<a name="l00332"></a>00332
<a name="l00333"></a>00333 STYPE=DOUBLE PRECISION[],
<a name="l00334"></a>00334 SFUNC=MADLIB_SCHEMA.__logregr_irls_step_transition,
<a name="l00335"></a>00335 m4_ifdef(`__GREENPLUM__&#39;,`prefunc=MADLIB_SCHEMA.__logregr_irls_step_merge_states,<span class="stringliteral">&#39;)</span>
<a name="l00336"></a>00336 <span class="stringliteral"> FINALFUNC=MADLIB_SCHEMA.__logregr_irls_step_final,</span>
<a name="l00337"></a>00337 <span class="stringliteral"> INITCOND=&#39;</span>{0,0,0,0}<span class="stringliteral">&#39;</span>
<a name="l00338"></a>00338 <span class="stringliteral">);</span>
<a name="l00339"></a>00339 <span class="stringliteral"></span>
<a name="l00340"></a>00340 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00341"></a>00341 <span class="stringliteral"></span><span class="comment"></span>
<a name="l00342"></a>00342 <span class="comment">/**</span>
<a name="l00343"></a>00343 <span class="comment"> * @internal</span>
<a name="l00344"></a>00344 <span class="comment"> * @brief Perform one iteration of the incremental gradient</span>
<a name="l00345"></a>00345 <span class="comment"> * method for computing logistic regression</span>
<a name="l00346"></a>00346 <span class="comment"> */</span>
<a name="l00347"></a>00347 CREATE AGGREGATE MADLIB_SCHEMA.__logregr_igd_step(
<a name="l00348"></a>00348 /*+ y */ BOOLEAN,
<a name="l00349"></a>00349 /*+ x */ DOUBLE PRECISION[],
<a name="l00350"></a>00350 /*+ previous_state */ DOUBLE PRECISION[]) (
<a name="l00351"></a>00351
<a name="l00352"></a>00352 STYPE=DOUBLE PRECISION[],
<a name="l00353"></a>00353 SFUNC=MADLIB_SCHEMA.__logregr_igd_step_transition,
<a name="l00354"></a>00354 m4_ifdef(`__GREENPLUM__&#39;,`prefunc=MADLIB_SCHEMA.__logregr_igd_step_merge_states,<span class="stringliteral">&#39;)</span>
<a name="l00355"></a>00355 <span class="stringliteral"> FINALFUNC=MADLIB_SCHEMA.__logregr_igd_step_final,</span>
<a name="l00356"></a>00356 <span class="stringliteral"> INITCOND=&#39;</span>{0,0,0,0,0}<span class="stringliteral">&#39;</span>
<a name="l00357"></a>00357 <span class="stringliteral">);</span>
<a name="l00358"></a>00358 <span class="stringliteral"></span>
<a name="l00359"></a>00359 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00360"></a>00360 <span class="stringliteral"></span>
<a name="l00361"></a>00361 <span class="stringliteral">CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_cg_step_distance(</span>
<a name="l00362"></a>00362 <span class="stringliteral"> /*+ state1 */ DOUBLE PRECISION[],</span>
<a name="l00363"></a>00363 <span class="stringliteral"> /*+ state2 */ DOUBLE PRECISION[])</span>
<a name="l00364"></a>00364 <span class="stringliteral">RETURNS DOUBLE PRECISION AS</span>
<a name="l00365"></a>00365 <span class="stringliteral">&#39;</span>MODULE_PATHNAME<span class="stringliteral">&#39;, &#39;</span>internal_logregr_cg_step_distance<span class="stringliteral">&#39;</span>
<a name="l00366"></a>00366 <span class="stringliteral">LANGUAGE c IMMUTABLE STRICT;</span>
<a name="l00367"></a>00367 <span class="stringliteral"></span>
<a name="l00368"></a>00368 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00369"></a>00369 <span class="stringliteral"></span>
<a name="l00370"></a>00370 <span class="stringliteral">CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_cg_result(</span>
<a name="l00371"></a>00371 <span class="stringliteral"> /*+ state */ DOUBLE PRECISION[])</span>
<a name="l00372"></a>00372 <span class="stringliteral">RETURNS MADLIB_SCHEMA.__logregr_result AS</span>
<a name="l00373"></a>00373 <span class="stringliteral">&#39;</span>MODULE_PATHNAME<span class="stringliteral">&#39;, &#39;</span>internal_logregr_cg_result<span class="stringliteral">&#39;</span>
<a name="l00374"></a>00374 <span class="stringliteral">LANGUAGE c IMMUTABLE STRICT;</span>
<a name="l00375"></a>00375 <span class="stringliteral"></span>
<a name="l00376"></a>00376 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00377"></a>00377 <span class="stringliteral"></span>
<a name="l00378"></a>00378 <span class="stringliteral">CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_irls_step_distance(</span>
<a name="l00379"></a>00379 <span class="stringliteral"> /*+ state1 */ DOUBLE PRECISION[],</span>
<a name="l00380"></a>00380 <span class="stringliteral"> /*+ state2 */ DOUBLE PRECISION[])</span>
<a name="l00381"></a>00381 <span class="stringliteral">RETURNS DOUBLE PRECISION AS</span>
<a name="l00382"></a>00382 <span class="stringliteral">&#39;</span>MODULE_PATHNAME<span class="stringliteral">&#39;, &#39;</span>internal_logregr_irls_step_distance<span class="stringliteral">&#39;</span>
<a name="l00383"></a>00383 <span class="stringliteral">LANGUAGE c IMMUTABLE STRICT;</span>
<a name="l00384"></a>00384 <span class="stringliteral"></span>
<a name="l00385"></a>00385 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00386"></a>00386 <span class="stringliteral"></span>
<a name="l00387"></a>00387 <span class="stringliteral">CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_irls_result(</span>
<a name="l00388"></a>00388 <span class="stringliteral"> /*+ state */ DOUBLE PRECISION[])</span>
<a name="l00389"></a>00389 <span class="stringliteral">RETURNS MADLIB_SCHEMA.__logregr_result AS</span>
<a name="l00390"></a>00390 <span class="stringliteral">&#39;</span>MODULE_PATHNAME<span class="stringliteral">&#39;, &#39;</span>internal_logregr_irls_result<span class="stringliteral">&#39;</span>
<a name="l00391"></a>00391 <span class="stringliteral">LANGUAGE c IMMUTABLE STRICT;</span>
<a name="l00392"></a>00392 <span class="stringliteral"></span>
<a name="l00393"></a>00393 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00394"></a>00394 <span class="stringliteral"></span>
<a name="l00395"></a>00395 <span class="stringliteral">CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_igd_step_distance(</span>
<a name="l00396"></a>00396 <span class="stringliteral"> /*+ state1 */ DOUBLE PRECISION[],</span>
<a name="l00397"></a>00397 <span class="stringliteral"> /*+ state2 */ DOUBLE PRECISION[])</span>
<a name="l00398"></a>00398 <span class="stringliteral">RETURNS DOUBLE PRECISION AS</span>
<a name="l00399"></a>00399 <span class="stringliteral">&#39;</span>MODULE_PATHNAME<span class="stringliteral">&#39;, &#39;</span>internal_logregr_igd_step_distance<span class="stringliteral">&#39;</span>
<a name="l00400"></a>00400 <span class="stringliteral">LANGUAGE c IMMUTABLE STRICT;</span>
<a name="l00401"></a>00401 <span class="stringliteral"></span>
<a name="l00402"></a>00402 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00403"></a>00403 <span class="stringliteral"></span>
<a name="l00404"></a>00404 <span class="stringliteral">CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.__logregr_igd_result(</span>
<a name="l00405"></a>00405 <span class="stringliteral"> /*+ state */ DOUBLE PRECISION[])</span>
<a name="l00406"></a>00406 <span class="stringliteral">RETURNS MADLIB_SCHEMA.__logregr_result AS</span>
<a name="l00407"></a>00407 <span class="stringliteral">&#39;</span>MODULE_PATHNAME<span class="stringliteral">&#39;, &#39;</span>internal_logregr_igd_result<span class="stringliteral">&#39;</span>
<a name="l00408"></a>00408 <span class="stringliteral">LANGUAGE c IMMUTABLE STRICT;</span>
<a name="l00409"></a>00409 <span class="stringliteral"></span>
<a name="l00410"></a>00410 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00411"></a>00411 <span class="stringliteral"></span><span class="comment"></span>
<a name="l00412"></a>00412 <span class="comment">/**</span>
<a name="l00413"></a>00413 <span class="comment"> * @brief Compute logistic-regression coefficients and diagnostic statistics</span>
<a name="l00414"></a>00414 <span class="comment"> *</span>
<a name="l00415"></a>00415 <span class="comment"> * To include an intercept in the model, set one coordinate in the</span>
<a name="l00416"></a>00416 <span class="comment"> * &lt;tt&gt;independentVariables&lt;/tt&gt; array to 1.</span>
<a name="l00417"></a>00417 <span class="comment"> *</span>
<a name="l00418"></a>00418 <span class="comment"> * @param tbl_source Name of the source relation containing the training data</span>
<a name="l00419"></a>00419 <span class="comment"> * @param tbl_output Name of the output relation to store the model results</span>
<a name="l00420"></a>00420 <span class="comment"> * Columns of the output relation are as follows:</span>
<a name="l00421"></a>00421 <span class="comment"> * - &lt;tt&gt;coef FLOAT8[]&lt;/tt&gt; - Array of coefficients, \f$ \boldsymbol c \f$</span>
<a name="l00422"></a>00422 <span class="comment"> * - &lt;tt&gt;log_likelihood FLOAT8&lt;/tt&gt; - Log-likelihood \f$ l(\boldsymbol c) \f$</span>
<a name="l00423"></a>00423 <span class="comment"> * - &lt;tt&gt;std_err FLOAT8[]&lt;/tt&gt; - Array of standard errors,</span>
<a name="l00424"></a>00424 <span class="comment"> * \f$ \mathit{se}(c_1), \dots, \mathit{se}(c_k) \f$</span>
<a name="l00425"></a>00425 <span class="comment"> * - &lt;tt&gt;z_stats FLOAT8[]&lt;/tt&gt; - Array of Wald z-statistics, \f$ \boldsymbol z \f$</span>
<a name="l00426"></a>00426 <span class="comment"> * - &lt;tt&gt;p_values FLOAT8[]&lt;/tt&gt; - Array of Wald p-values, \f$ \boldsymbol p \f$</span>
<a name="l00427"></a>00427 <span class="comment"> * - &lt;tt&gt;odds_ratios FLOAT8[]&lt;/tt&gt;: Array of odds ratios,</span>
<a name="l00428"></a>00428 <span class="comment"> * \f$ \mathit{odds}(c_1), \dots, \mathit{odds}(c_k) \f$</span>
<a name="l00429"></a>00429 <span class="comment"> * - &lt;tt&gt;condition_no FLOAT8&lt;/tt&gt; - The condition number of</span>
<a name="l00430"></a>00430 <span class="comment"> * matrix \f$ X^T A X \f$ during the iteration</span>
<a name="l00431"></a>00431 <span class="comment"> * immediately &lt;em&gt;preceding&lt;/em&gt; convergence</span>
<a name="l00432"></a>00432 <span class="comment"> * (i.e., \f$ A \f$ is computed using the coefficients</span>
<a name="l00433"></a>00433 <span class="comment"> * of the previous iteration)</span>
<a name="l00434"></a>00434 <span class="comment"> * @param dep_col Name of the dependent column (of type BOOLEAN)</span>
<a name="l00435"></a>00435 <span class="comment"> * @param ind_col Name of the independent column (of type DOUBLE</span>
<a name="l00436"></a>00436 <span class="comment"> * PRECISION[])</span>
<a name="l00437"></a>00437 <span class="comment"> * @param grouping_col Comma delimited list of column names to group-by</span>
<a name="l00438"></a>00438 <span class="comment"> * @param max_iter The maximum number of iterations</span>
<a name="l00439"></a>00439 <span class="comment"> * @param optimizer The optimizer to use (either</span>
<a name="l00440"></a>00440 <span class="comment"> * &lt;tt&gt;&#39;irls&#39;&lt;/tt&gt;/&lt;tt&gt;&#39;newton&#39;&lt;/tt&gt; for iteratively reweighted least</span>
<a name="l00441"></a>00441 <span class="comment"> *        squares or &lt;tt&gt;&#39;cg&#39;&lt;/tt&gt; for conjugate gradient)</span>
<a name="l00442"></a>00442 <span class="comment"> * @param tolerance The difference between log-likelihood values in successive</span>
<a name="l00443"></a>00443 <span class="comment"> * iterations that should indicate convergence. This value should be</span>
<a name="l00444"></a>00444 <span class="comment"> * non-negative and a zero value here disables the convergence criterion,</span>
<a name="l00445"></a>00445 <span class="comment"> *     and execution will only stop after \c max_iter iterations.</span>
<a name="l00446"></a>00446 <span class="comment"> * @param verbose If true, any error or warning message will be printed to the</span>
<a name="l00447"></a>00447 <span class="comment"> * console (irrespective of the &#39;client_min_messages&#39; set by server).</span>
<a name="l00448"></a>00448 <span class="comment"> * If false, no error/warning message is printed to console.</span>
<a name="l00449"></a>00449 <span class="comment"> *</span>
<a name="l00450"></a>00450 <span class="comment"> *</span>
<a name="l00451"></a>00451 <span class="comment"> * @usage</span>
<a name="l00452"></a>00452 <span class="comment"> * - Get vector of coefficients \f$ \boldsymbol c \f$ and all diagnostic</span>
<a name="l00453"></a>00453 <span class="comment"> * statistics:\n</span>
<a name="l00454"></a>00454 <span class="comment"> *     &lt;pre&gt;SELECT logregr_train(&#39;&lt;em&gt;sourceName&lt;/em&gt;&#39;, &#39;&lt;em&gt;outName&lt;/em&gt;&#39;,</span>
<a name="l00455"></a>00455 <span class="comment"> * &#39;&lt;em&gt;dependentVariable&lt;/em&gt;&#39;, &#39;&lt;em&gt;independentVariables&lt;/em&gt;&#39;);</span>
<a name="l00456"></a>00456 <span class="comment"> * SELECT * from outName;</span>
<a name="l00457"></a>00457 <span class="comment"> * &lt;/pre&gt;</span>
<a name="l00458"></a>00458 <span class="comment"> * - Get vector of coefficients \f$ \boldsymbol c \f$:\n</span>
<a name="l00459"></a>00459 <span class="comment"> * &lt;pre&gt;SELECT coef from outName;&lt;/pre&gt;</span>
<a name="l00460"></a>00460 <span class="comment"> * - Get a subset of the output columns, e.g., only the array of coefficients</span>
<a name="l00461"></a>00461 <span class="comment"> *   \f$ \boldsymbol c \f$, the log-likelihood</span>
<a name="l00462"></a>00462 <span class="comment"> * \f$ l(\boldsymbol c) \f$, and the array of p-values \f$ \boldsymbol p \f$:</span>
<a name="l00463"></a>00463 <span class="comment"> * &lt;pre&gt;SELECT coef, log_likelihood, p_values FROM outName;&lt;/pre&gt;</span>
<a name="l00464"></a>00464 <span class="comment"> *</span>
<a name="l00465"></a>00465 <span class="comment"> * @note This function starts an iterative algorithm. It is not an aggregate</span>
<a name="l00466"></a>00466 <span class="comment"> * function. Source, output, and column names have to be passed as strings</span>
<a name="l00467"></a>00467 <span class="comment"> * (due to limitations of the SQL syntax).</span>
<a name="l00468"></a>00468 <span class="comment"> *</span>
<a name="l00469"></a>00469 <span class="comment"> * @internal</span>
<a name="l00470"></a>00470 <span class="comment"> * @sa This function is a wrapper for logistic::compute_logregr(), which</span>
<a name="l00471"></a>00471 <span class="comment"> * sets the default values.</span>
<a name="l00472"></a>00472 <span class="comment"> */</span>
<a name="l00473"></a>00473 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.logregr_train (
<a name="l00474"></a>00474 tbl_source VARCHAR,
<a name="l00475"></a>00475 tbl_output VARCHAR,
<a name="l00476"></a>00476 dep_col VARCHAR,
<a name="l00477"></a>00477 ind_col VARCHAR,
<a name="l00478"></a>00478 grouping_col VARCHAR,
<a name="l00479"></a>00479 max_iter INTEGER,
<a name="l00480"></a>00480 optimizer VARCHAR,
<a name="l00481"></a>00481 tolerance DOUBLE PRECISION,
<a name="l00482"></a>00482 verbose BOOLEAN
<a name="l00483"></a>00483 ) RETURNS VOID AS $$
<a name="l00484"></a>00484 PythonFunction(regress, logistic, logregr_train)
<a name="l00485"></a>00485 $$ LANGUAGE plpythonu;
<a name="l00486"></a>00486
<a name="l00487"></a>00487 ------------------------------------------------------------------------
<a name="l00488"></a>00488
<a name="l00489"></a>00489 CREATE FUNCTION MADLIB_SCHEMA.logregr_train (
<a name="l00490"></a>00490 tbl_source VARCHAR,
<a name="l00491"></a>00491 tbl_output VARCHAR,
<a name="l00492"></a>00492 dep_col VARCHAR,
<a name="l00493"></a>00493 ind_col VARCHAR)
<a name="l00494"></a>00494 RETURNS VOID AS $$
<a name="l00495"></a><a class="code" href="logistic_8sql__in.html#a32880a39de2e36b6c6be72691a6a4a40">00495</a> SELECT MADLIB_SCHEMA.logregr_train($1, $2, $3, $4, NULL::VARCHAR, 20, &#39;irls<span class="stringliteral">&#39;, 0.0001, False);</span>
<a name="l00496"></a>00496 <span class="stringliteral">$$ LANGUAGE sql VOLATILE;</span>
<a name="l00497"></a>00497 <span class="stringliteral"></span>
<a name="l00498"></a>00498 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00499"></a>00499 <span class="stringliteral"></span>
<a name="l00500"></a>00500 <span class="stringliteral">CREATE FUNCTION MADLIB_SCHEMA.logregr_train (</span>
<a name="l00501"></a>00501 <span class="stringliteral"> tbl_source VARCHAR,</span>
<a name="l00502"></a>00502 <span class="stringliteral"> tbl_output VARCHAR,</span>
<a name="l00503"></a>00503 <span class="stringliteral"> dep_col VARCHAR,</span>
<a name="l00504"></a>00504 <span class="stringliteral"> ind_col VARCHAR,</span>
<a name="l00505"></a>00505 <span class="stringliteral"> grouping_col VARCHAR)</span>
<a name="l00506"></a>00506 <span class="stringliteral">RETURNS VOID AS $$</span>
<a name="l00507"></a>00507 <span class="stringliteral"> SELECT MADLIB_SCHEMA.logregr_train($1, $2, $3, $4, $5, 20, &#39;</span>irls<span class="stringliteral">&#39;, 0.0001, False);</span>
<a name="l00508"></a>00508 <span class="stringliteral">$$LANGUAGE sql VOLATILE;</span>
<a name="l00509"></a>00509 <span class="stringliteral"></span>
<a name="l00510"></a>00510 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00511"></a>00511 <span class="stringliteral"></span>
<a name="l00512"></a>00512 <span class="stringliteral">CREATE FUNCTION MADLIB_SCHEMA.logregr_train (</span>
<a name="l00513"></a>00513 <span class="stringliteral"> tbl_source VARCHAR,</span>
<a name="l00514"></a>00514 <span class="stringliteral"> tbl_output VARCHAR,</span>
<a name="l00515"></a>00515 <span class="stringliteral"> dep_col VARCHAR,</span>
<a name="l00516"></a>00516 <span class="stringliteral"> ind_col VARCHAR,</span>
<a name="l00517"></a>00517 <span class="stringliteral"> grouping_col VARCHAR,</span>
<a name="l00518"></a>00518 <span class="stringliteral"> max_iter INTEGER)</span>
<a name="l00519"></a>00519 <span class="stringliteral">RETURNS VOID AS $$</span>
<a name="l00520"></a>00520 <span class="stringliteral"> SELECT MADLIB_SCHEMA.logregr_train($1, $2, $3, $4, $5, $6, &#39;</span>irls<span class="stringliteral">&#39;, 0.0001, False);</span>
<a name="l00521"></a>00521 <span class="stringliteral">$$LANGUAGE sql VOLATILE;</span>
<a name="l00522"></a>00522 <span class="stringliteral"></span>
<a name="l00523"></a>00523 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00524"></a>00524 <span class="stringliteral"></span>
<a name="l00525"></a>00525 <span class="stringliteral">CREATE FUNCTION MADLIB_SCHEMA.logregr_train (</span>
<a name="l00526"></a>00526 <span class="stringliteral"> tbl_source VARCHAR,</span>
<a name="l00527"></a>00527 <span class="stringliteral"> tbl_output VARCHAR,</span>
<a name="l00528"></a>00528 <span class="stringliteral"> dep_col VARCHAR,</span>
<a name="l00529"></a>00529 <span class="stringliteral"> ind_col VARCHAR,</span>
<a name="l00530"></a>00530 <span class="stringliteral"> grouping_col VARCHAR,</span>
<a name="l00531"></a>00531 <span class="stringliteral"> max_iter INTEGER,</span>
<a name="l00532"></a>00532 <span class="stringliteral"> optimizer VARCHAR)</span>
<a name="l00533"></a>00533 <span class="stringliteral">RETURNS VOID AS $$</span>
<a name="l00534"></a>00534 <span class="stringliteral"> SELECT MADLIB_SCHEMA.logregr_train($1, $2, $3, $4, $5, $6, $7, 0.0001, False);</span>
<a name="l00535"></a>00535 <span class="stringliteral">$$ LANGUAGE sql VOLATILE;</span>
<a name="l00536"></a>00536 <span class="stringliteral"></span>
<a name="l00537"></a>00537 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00538"></a>00538 <span class="stringliteral"></span>
<a name="l00539"></a>00539 <span class="stringliteral">CREATE FUNCTION MADLIB_SCHEMA.logregr_train (</span>
<a name="l00540"></a>00540 <span class="stringliteral"> tbl_source VARCHAR,</span>
<a name="l00541"></a>00541 <span class="stringliteral"> tbl_output VARCHAR,</span>
<a name="l00542"></a>00542 <span class="stringliteral"> dep_col VARCHAR,</span>
<a name="l00543"></a>00543 <span class="stringliteral"> ind_col VARCHAR,</span>
<a name="l00544"></a>00544 <span class="stringliteral"> grouping_col VARCHAR,</span>
<a name="l00545"></a>00545 <span class="stringliteral"> max_iter INTEGER,</span>
<a name="l00546"></a>00546 <span class="stringliteral"> optimizer VARCHAR,</span>
<a name="l00547"></a>00547 <span class="stringliteral"> tolerance DOUBLE PRECISION)</span>
<a name="l00548"></a>00548 <span class="stringliteral">RETURNS VOID AS $$</span>
<a name="l00549"></a>00549 <span class="stringliteral"> SELECT MADLIB_SCHEMA.logregr_train($1, $2, $3, $4, $5, $6, $7, $8, False);</span>
<a name="l00550"></a>00550 <span class="stringliteral">$$ LANGUAGE sql VOLATILE;</span>
<a name="l00551"></a>00551 <span class="stringliteral"></span>
<a name="l00552"></a>00552 <span class="stringliteral">------------------------------------------------------------------------</span>
<a name="l00553"></a>00553 <span class="stringliteral"></span><span class="comment"></span>
<a name="l00554"></a>00554 <span class="comment">/**</span>
<a name="l00555"></a>00555 <span class="comment"> * @brief Evaluate the usual logistic function in an under-/overflow-safe way</span>
<a name="l00556"></a>00556 <span class="comment"> *</span>
<a name="l00557"></a>00557 <span class="comment"> * @param x</span>
<a name="l00558"></a>00558 <span class="comment"> * @returns \f$ \frac{1}{1 + \exp(-x)} \f$</span>
<a name="l00559"></a>00559 <span class="comment"> *</span>
<a name="l00560"></a>00560 <span class="comment"> * Evaluating this expression directly can lead to under- or overflows.</span>
<a name="l00561"></a>00561 <span class="comment"> * This function performs the evaluation in a safe manner, making use of the</span>
<a name="l00562"></a>00562 <span class="comment"> * following observations:</span>
<a name="l00563"></a>00563 <span class="comment"> *</span>
<a name="l00564"></a>00564 <span class="comment"> * In order for the outcome of \f$ \exp(x) \f$ to be within the range of the</span>
<a name="l00565"></a>00565 <span class="comment"> * minimum positive double-precision number (i.e., \f$ 2^{-1074} \f$) and the</span>
<a name="l00566"></a>00566 <span class="comment"> * maximum positive double-precision number (i.e.,</span>
<a name="l00567"></a>00567 <span class="comment"> * \f$ (1 + (1 - 2^{52})) * 2^{1023}) \f$, \f$ x \f$ has to be within the</span>
<a name="l00568"></a>00568 <span class="comment"> * natural logarithm of these numbers, so roughly in between -744 and 709.</span>
<a name="l00569"></a>00569 <span class="comment"> * However, \f$ 1 + \exp(x) \f$ will just evaluate to 1 if \f$ \exp(x) \f$ is</span>
<a name="l00570"></a>00570 <span class="comment"> * less than the machine epsilon (i.e., \f$ 2^{-52} \f$) or, equivalently, if</span>
<a name="l00571"></a>00571 <span class="comment"> * \f$ x \f$ is less than the natural logarithm of that; i.e., in any case if</span>
<a name="l00572"></a>00572 <span class="comment"> * \f$ x \f$ is less than -37.</span>
<a name="l00573"></a>00573 <span class="comment"> * Note that taking the reciprocal of the largest double-precision number will</span>
<a name="l00574"></a>00574 <span class="comment"> * not cause an underflow. Hence, no further checks are necessary.</span>
<a name="l00575"></a>00575 <span class="comment"> */</span>
<a name="l00576"></a>00576 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.logistic(x DOUBLE PRECISION)
<a name="l00577"></a>00577 RETURNS DOUBLE PRECISION
<a name="l00578"></a>00578 LANGUAGE sql
<a name="l00579"></a>00579 AS $$
<a name="l00580"></a>00580 SELECT CASE WHEN -$1 &lt; -37 THEN 1
<a name="l00581"></a>00581 WHEN -$1 &gt; 709 THEN 0
<a name="l00582"></a>00582 ELSE 1 / (1 + exp(-$1))
<a name="l00583"></a>00583 END;
<a name="l00584"></a>00584 $$;
<a name="l00585"></a>00585
</pre></div></div>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="logistic_8sql__in.html">logistic.sql_in</a> </li>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
<a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(0)"><span class="SelectionMark">&#160;</span>All</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(1)"><span class="SelectionMark">&#160;</span>Files</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(2)"><span class="SelectionMark">&#160;</span>Functions</a></div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<li class="footer">Generated on Tue Apr 2 2013 14:57:03 for MADlib by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.7.5.1 </li>
</ul>
</div>
</body>
</html>