# Imports from base R
importFrom(methods, setGeneric, setMethod, setOldClass)

# Disable native libraries till we figure out how to package it
# See SPARKR-7839
#useDynLib(SparkR, stringHashCode)

# Exported functions and S3 methods
export("sparkR.init")
export("sparkR.stop")
export("print.jobj")

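# A minimal, illustrative usage sketch for the context entry points exported
# above (kept as a comment, since NAMESPACE files only accept directives);
# the master and appName values are assumptions, not defaults:
#   sc <- sparkR.init(master = "local[*]", appName = "SparkR-example")
#   # ... run SparkR code against sc ...
#   sparkR.stop()
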
# MLlib integration
exportMethods("glm",
              "predict",
              "summary")

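# Hedged sketch of the MLlib generics exported above; the DataFrame "df" and
# its column names are illustrative assumptions:
#   model <- glm(Sepal_Length ~ Sepal_Width, data = df, family = "gaussian")
#   summary(model)
#   predictions <- predict(model, df)
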
# Job group lifecycle management methods
export("setJobGroup",
       "clearJobGroup",
       "cancelJobGroup")

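# Hedged sketch of the job group helpers exported above; the group id and
# description strings are illustrative:
#   setJobGroup(sc, "etl-jobs", "nightly ETL", interruptOnCancel = TRUE)
#   cancelJobGroup(sc, "etl-jobs")
#   clearJobGroup(sc)
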
| exportClasses("DataFrame") |
| |
| exportMethods("arrange", |
| "cache", |
| "collect", |
| "columns", |
| "count", |
| "crosstab", |
| "describe", |
| "dim", |
| "distinct", |
| "dropna", |
| "dtypes", |
| "except", |
| "explain", |
| "fillna", |
| "filter", |
| "first", |
| "group_by", |
| "groupBy", |
| "head", |
| "insertInto", |
| "intersect", |
| "isLocal", |
| "join", |
| "limit", |
| "merge", |
| "mutate", |
| "na.omit", |
| "names", |
| "ncol", |
| "nrow", |
| "orderBy", |
| "persist", |
| "printSchema", |
| "rbind", |
| "registerTempTable", |
| "rename", |
| "repartition", |
| "sample", |
| "sample_frac", |
| "saveAsParquetFile", |
| "saveAsTable", |
| "saveDF", |
| "schema", |
| "select", |
| "selectExpr", |
| "show", |
| "showDF", |
| "subset", |
| "summarize", |
| "summary", |
| "take", |
| "transform", |
| "unionAll", |
| "unique", |
| "unpersist", |
| "where", |
| "withColumn", |
| "withColumnRenamed", |
| "write.df") |
| |
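# Hedged sketch of a few of the DataFrame methods exported above; "sqlContext"
# and the "people.json" path are illustrative assumptions:
#   df <- jsonFile(sqlContext, "people.json")
#   printSchema(df)
#   adults <- filter(df, df$age > 21)
#   head(select(adults, "name", "age"))
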
| exportClasses("Column") |
| |
| exportMethods("%in%", |
| "abs", |
| "acos", |
| "add_months", |
| "alias", |
| "approxCountDistinct", |
| "asc", |
| "ascii", |
| "asin", |
| "atan", |
| "atan2", |
| "avg", |
| "base64", |
| "between", |
| "bin", |
| "bitwiseNOT", |
| "cast", |
| "cbrt", |
| "ceil", |
| "ceiling", |
| "concat", |
| "concat_ws", |
| "contains", |
| "conv", |
| "cos", |
| "cosh", |
| "count", |
| "countDistinct", |
| "crc32", |
| "date_add", |
| "date_format", |
| "date_sub", |
| "datediff", |
| "dayofmonth", |
| "dayofyear", |
| "desc", |
| "endsWith", |
| "exp", |
| "explode", |
| "expm1", |
| "expr", |
| "factorial", |
| "first", |
| "floor", |
| "format_number", |
| "format_string", |
| "from_unixtime", |
| "from_utc_timestamp", |
| "getField", |
| "getItem", |
| "greatest", |
| "hex", |
| "hour", |
| "hypot", |
| "ifelse", |
| "initcap", |
| "instr", |
| "isNaN", |
| "isNotNull", |
| "isNull", |
| "last", |
| "last_day", |
| "least", |
| "length", |
| "levenshtein", |
| "like", |
| "lit", |
| "locate", |
| "log", |
| "log10", |
| "log1p", |
| "log2", |
| "lower", |
| "lpad", |
| "ltrim", |
| "max", |
| "md5", |
| "mean", |
| "min", |
| "minute", |
| "month", |
| "months_between", |
| "n", |
| "n_distinct", |
| "nanvl", |
| "negate", |
| "next_day", |
| "otherwise", |
| "pmod", |
| "quarter", |
| "rand", |
| "randn", |
| "regexp_extract", |
| "regexp_replace", |
| "reverse", |
| "rint", |
| "rlike", |
| "round", |
| "rpad", |
| "rtrim", |
| "second", |
| "sha1", |
| "sha2", |
| "shiftLeft", |
| "shiftRight", |
| "shiftRightUnsigned", |
| "sign", |
| "signum", |
| "sin", |
| "sinh", |
| "size", |
| "soundex", |
| "sqrt", |
| "startsWith", |
| "substr", |
| "substring_index", |
| "sum", |
| "sumDistinct", |
| "tan", |
| "tanh", |
| "toDegrees", |
| "toRadians", |
| "to_date", |
| "to_utc_timestamp", |
| "translate", |
| "trim", |
| "unbase64", |
| "unhex", |
| "unix_timestamp", |
| "upper", |
| "weekofyear", |
| "when", |
| "year") |
| |
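# Hedged sketch of Column expressions built from the generics exported above;
# the DataFrame "df" and its columns are illustrative assumptions:
#   df2 <- withColumn(df, "name_upper", upper(df$name))
#   older <- filter(df, df$age > lit(30))
#   arrange(df2, desc(df2$age))
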
| exportClasses("GroupedData") |
| exportMethods("agg") |
| |
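# Hedged sketch of grouped aggregation with the GroupedData class exported
# above; the column names are assumptions:
#   deptAges <- agg(groupBy(df, "department"), avg(df$age))
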
| export("sparkRSQL.init", |
| "sparkRHive.init") |
| |
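# Hedged sketch: creating a SQLContext (or HiveContext) from an existing
# SparkContext with the initializers exported above:
#   sqlContext <- sparkRSQL.init(sc)
#   hiveContext <- sparkRHive.init(sc)
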
| export("cacheTable", |
| "clearCache", |
| "createDataFrame", |
| "createExternalTable", |
| "dropTempTable", |
| "jsonFile", |
| "loadDF", |
| "parquetFile", |
| "read.df", |
| "sql", |
| "table", |
| "tableNames", |
| "tables", |
| "uncacheTable") |
| |
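# Hedged sketch of the SQLContext helpers exported above; the JSON path is an
# illustrative assumption:
#   df <- read.df(sqlContext, "examples/src/main/resources/people.json", "json")
#   registerTempTable(df, "people")
#   teens <- sql(sqlContext, "SELECT name FROM people WHERE age BETWEEN 13 AND 19")
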
| export("structField", |
| "structField.jobj", |
| "structField.character", |
| "print.structField", |
| "structType", |
| "structType.jobj", |
| "structType.structField", |
| "print.structType") |