Skip to content

Commit 41c4d31

Browse files
anirudhagar13 (Anirudh Agarwal)
and a co-author authored
Revert mvel log (#1140)
* Revert "Allow alien value in MVEL-based derivations (#1120) and remove stdout statements"

  This reverts commit 55290e7.

* Updating rc version after last commit

---------

Co-authored-by: Anirudh Agarwal <aniagarw@aniagarw-mn1.linkedin.biz>
1 parent 65702dc commit 41c4d31

15 files changed

Lines changed: 19 additions & 62 deletions

feathr-impl/src/main/java/com/linkedin/feathr/common/FeatureVariableResolver.java

Lines changed: 7 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -4,24 +4,19 @@
44
import com.linkedin.feathr.common.tensor.TensorIterator;
55
import com.linkedin.feathr.common.types.ValueType;
66
import com.linkedin.feathr.common.util.CoercionUtils;
7-
import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext;
8-
import org.mvel2.DataConversion;
97
import org.mvel2.integration.impl.SimpleValueResolver;
108

11-
import java.util.Optional;
12-
139

1410
/**
1511
* FeatureVariableResolver takes a FeatureValue object for member variable during MVEL expression evaluation,
1612
* and then resolve the value for that variable.
1713
*/
1814
public class FeatureVariableResolver extends SimpleValueResolver {
1915
private FeatureValue _featureValue;
20-
private Optional<FeathrExpressionExecutionContext> _mvelContext = Optional.empty();
21-
public FeatureVariableResolver(FeatureValue featureValue, Optional<FeathrExpressionExecutionContext> mvelContext) {
16+
17+
public FeatureVariableResolver(FeatureValue featureValue) {
2218
super(featureValue);
2319
_featureValue = featureValue;
24-
_mvelContext = mvelContext;
2520
}
2621

2722
@Override
@@ -30,27 +25,21 @@ public Object getValue() {
3025
return null;
3126
}
3227

33-
Object fv = null;
3428
switch (_featureValue.getFeatureType().getBasicType()) {
3529
case NUMERIC:
36-
fv = _featureValue.getAsNumeric(); break;
30+
return _featureValue.getAsNumeric();
3731
case TERM_VECTOR:
38-
fv = getValueFromTermVector(); break;
32+
return getValueFromTermVector();
3933
case BOOLEAN:
4034
case CATEGORICAL:
4135
case CATEGORICAL_SET:
4236
case DENSE_VECTOR:
4337

4438
case TENSOR:
45-
fv = getValueFromTensor(); break;
39+
return getValueFromTensor();
40+
4641
default:
47-
throw new IllegalArgumentException("Unexpected feature type: " + _featureValue.getFeatureType().getBasicType());
48-
}
49-
// If there is any registered FeatureValue handler that can handle this feature value, return the converted value per request.
50-
if (_mvelContext.isPresent() && _mvelContext.get().canConvertFromAny(fv)) {
51-
return _mvelContext.get().convertFromAny(fv).head();
52-
} else {
53-
return fv;
42+
throw new IllegalArgumentException("Unexpected feature type: " + _featureValue.getFeatureType().getBasicType());
5443
}
5544
}
5645

feathr-impl/src/main/scala/com/linkedin/feathr/offline/PostTransformationUtil.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -130,7 +130,7 @@ private[offline] object PostTransformationUtil {
130130
featureType: FeatureTypes,
131131
mvelContext: Option[FeathrExpressionExecutionContext]): Try[FeatureValue] = Try {
132132
val args = Map(featureName -> Some(featureValue))
133-
val variableResolverFactory = new FeatureVariableResolverFactory(args, mvelContext)
133+
val variableResolverFactory = new FeatureVariableResolverFactory(args)
134134
val transformedValue = MvelContext.executeExpressionWithPluginSupportWithFactory(compiledExpression, featureValue, variableResolverFactory, mvelContext.orNull)
135135
CoercionUtilsScala.coerceToFeatureValue(transformedValue, featureType)
136136
}

feathr-impl/src/main/scala/com/linkedin/feathr/offline/derived/functions/MvelFeatureDerivationFunction.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -42,7 +42,7 @@ private[offline] class MvelFeatureDerivationFunction(
4242

4343
override def getFeatures(inputs: Seq[Option[common.FeatureValue]]): Seq[Option[common.FeatureValue]] = {
4444
val argMap = (parameterNames zip inputs).toMap
45-
val variableResolverFactory = new FeatureVariableResolverFactory(argMap, mvelContext)
45+
val variableResolverFactory = new FeatureVariableResolverFactory(argMap)
4646

4747
MvelUtils.executeExpression(compiledExpression, null, variableResolverFactory, featureName, mvelContext) match {
4848
case Some(value) =>

feathr-impl/src/main/scala/com/linkedin/feathr/offline/derived/functions/MvelFeatureDerivationFunction1.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -39,7 +39,7 @@ private[offline] class MvelFeatureDerivationFunction1(
3939

4040
override def getFeatures(inputs: Seq[Option[common.FeatureValue]]): Seq[Option[common.FeatureValue]] = {
4141
val argMap = (parameterNames zip inputs).toMap
42-
val variableResolverFactory = new FeatureVariableResolverFactory(argMap, mvelContext)
42+
val variableResolverFactory = new FeatureVariableResolverFactory(argMap)
4343

4444
MvelUtils.executeExpression(compiledExpression, null, variableResolverFactory, featureName, mvelContext) match {
4545
case Some(value) =>

feathr-impl/src/main/scala/com/linkedin/feathr/offline/derived/functions/SimpleMvelDerivationFunction.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -44,7 +44,7 @@ private[offline] class SimpleMvelDerivationFunction(expression: String, featureN
4444
MvelContext.ensureInitialized()
4545

4646
// In order to prevent MVEL from barfing if a feature is null, we use a custom variable resolver that understands `Option`
47-
val variableResolverFactory = new FeatureVariableResolverFactory(args, mvelContext)
47+
val variableResolverFactory = new FeatureVariableResolverFactory(args)
4848

4949
if (TestFwkUtils.IS_DEBUGGER_ENABLED) {
5050
while(TestFwkUtils.DERIVED_FEATURE_COUNTER > 0) {

feathr-impl/src/main/scala/com/linkedin/feathr/offline/evaluator/transformation/AnchorUDFOperator.scala

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -53,15 +53,13 @@ object AnchorUDFOperator extends TransformationOperator {
5353
val (withFeaturesDf, outputJoinKeyColumnNames) = newExtractor match {
5454
case sparkExtractor: SimpleAnchorExtractorSpark =>
5555
// Note that for Spark UDFs we only support SQL keys.
56-
print("in simpleanchorextractorspark = " + newExtractor)
5756
val sqlKeyExtractor = new SQLSourceKeyExtractor(keySeq)
5857
val withKeyColumnDF = if (appendKeyColumns) sqlKeyExtractor.appendKeyColumns(inputDf) else inputDf
5958
val outputJoinKeyColumnNames = getFeatureKeyColumnNames(sqlKeyExtractor, withKeyColumnDF)
6059

6160
val tensorizedFeatureColumns = sparkExtractor.getFeatures(inputDf, Map())
6261
val transformedColsAndFormats: Map[(String, Column), FeatureColumnFormat] = extractor match {
6362
case extractor2: SQLConfigurableAnchorExtractor =>
64-
print("in SQLConfigurableAnchorExtractor = " + newExtractor)
6563
// If instance of SQLConfigurableAnchorExtractor, get Tensor features
6664
// Get DataFrame schema for tensor based on FML or inferred tensor type.
6765
val featureSchemas = featureNamesInBatch.map(featureName => {

feathr-impl/src/main/scala/com/linkedin/feathr/offline/evaluator/transformation/TransformationOperatorUtils.scala

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -42,9 +42,7 @@ object TransformationOperatorUtils {
4242
def createFeatureDF(inputDf: DataFrame, featureColumnDefs: Seq[(String, Column)]): DataFrame = {
4343
// first add a prefix to the feature column name in the schema
4444
val featureColumnNamePrefix = "_frame_sql_feature_prefix_"
45-
print(inputDf.columns.mkString("Array(", ", ", ")"))
4645
val transformedDF = featureColumnDefs.foldLeft(inputDf)((baseDF, columnWithName) => {
47-
print("COLUMN NAME = " + columnWithName)
4846
val columnName = featureColumnNamePrefix + columnWithName._1
4947
baseDF.withColumn(columnName, expr(columnWithName._2.toString()))
5048
})

feathr-impl/src/main/scala/com/linkedin/feathr/offline/job/FeatureGenJob.scala

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -75,8 +75,6 @@ object FeatureGenJob {
7575
val dataSourceConfigs = DataSourceConfigUtils.getConfigs(cmdParser)
7676
val featureGenJobContext = new FeatureGenJobContext(workDir, paramsOverride, featureConfOverride)
7777

78-
println("dataSourceConfigs: ")
79-
println(dataSourceConfigs)
8078
(applicationConfigPath, featureDefinitionsInput, featureGenJobContext, dataSourceConfigs)
8179
}
8280

feathr-impl/src/main/scala/com/linkedin/feathr/offline/job/FeatureJoinJob.scala

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -69,7 +69,6 @@ object FeatureJoinJob {
6969
def run(ss: SparkSession, hadoopConf: Configuration, jobContext: FeathrJoinJobContext, dataPathHandlers: List[DataPathHandler]): Unit = {
7070
val dataLoaderHandlers: List[DataLoaderHandler] = dataPathHandlers.map(_.dataLoaderHandler)
7171
val joinConfig = FeatureJoinConfig.parseJoinConfig(hdfsFileReader(ss, jobContext.joinConfig))
72-
print("join config is, ",joinConfig)
7372
// check read authorization for observation data, and write authorization for output path
7473
checkAuthorization(ss, hadoopConf, jobContext, dataLoaderHandlers)
7574

@@ -86,7 +85,6 @@ object FeatureJoinJob {
8685
def stringifyFeatureNames(nameSet: Set[String]): String = nameSet.toSeq.sorted.toArray.mkString("\n\t")
8786

8887
def hdfsFileReader(ss: SparkSession, path: String): String = {
89-
print("ss.sparkContext.textFile(path),", path)
9088
ss.sparkContext.textFile(path).collect.mkString("\n")
9189
}
9290

feathr-impl/src/main/scala/com/linkedin/feathr/offline/job/FeatureTransformation.scala

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -888,7 +888,6 @@ private[offline] object FeatureTransformation {
888888
val features = transformers map {
889889
case extractor: AnchorExtractor[IndexedRecord] =>
890890
val features = extractor.getFeatures(record)
891-
print(features)
892891
FeatureValueTypeValidator.validate(features, featureTypeConfigs)
893892
features
894893
case extractor =>
@@ -1423,7 +1422,6 @@ private[offline] object FeatureTransformation {
14231422
val features = transformers map {
14241423
case extractor: AnchorExtractor[Any] =>
14251424
val features = extractor.getFeatures(row)
1426-
print(features)
14271425
FeatureValueTypeValidator.validate(features, featureTypeConfigs)
14281426
features
14291427
case extractor =>

0 commit comments

Comments (0)