Skip to content

Commit ef3e130

Browse files
authored
bump version to 0.8.0 (#694)
Signed-off-by: Yuqing Wei <weiyuqing021@outlook.com>
1 parent ebcc81c commit ef3e130

8 files changed

Lines changed: 11 additions & 11 deletions

File tree

build.sbt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import sbt.Keys.publishLocalConfiguration
22

33
ThisBuild / resolvers += Resolver.mavenLocal
44
ThisBuild / scalaVersion := "2.12.15"
5-
ThisBuild / version := "0.7.2"
5+
ThisBuild / version := "0.8.0"
66
ThisBuild / organization := "com.linkedin.feathr"
77
ThisBuild / organizationName := "linkedin"
88
val sparkVersion = "3.1.3"

docs/how-to-guides/azure_resource_provision.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@
111111
"destinationBacpacBlobUrl": "[concat('https://',variables('dlsName'),'.blob.core.windows.net/',variables('dlsFsName'),'/',variables('bacpacBlobName'))]",
112112
"bacpacDeploymentScriptName": "CopyBacpacFile",
113113
"bacpacDbExtensionName": "registryRbacDbImport",
114-
"preBuiltdockerImage": "feathrfeaturestore/feathr-registry:releases-v0.7.2"
114+
"preBuiltdockerImage": "feathrfeaturestore/feathr-registry:releases-v0.8.0"
115115
},
116116
"functions": [],
117117
"resources": [

docs/how-to-guides/local-spark-provider.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ A spark-submit script will auto generated in your workspace under `debug` folder
3636
spark-submit \
3737
--master local[*] \
3838
--name project_feathr_local_spark_test \
39-
--packages "org.apache.spark:spark-avro_2.12:3.3.0,com.microsoft.sqlserver:mssql-jdbc:10.2.0.jre8,com.microsoft.azure:spark-mssql-connector_2.12:1.2.0,org.apache.logging.log4j:log4j-core:2.17.2,com.typesafe:config:1.3.4,com.fasterxml.jackson.core:jackson-databind:2.12.6.1,org.apache.hadoop:hadoop-mapreduce-client-core:2.7.7,org.apache.hadoop:hadoop-common:2.7.7,org.apache.avro:avro:1.8.2,org.apache.xbean:xbean-asm6-shaded:4.10,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,com.microsoft.azure:azure-eventhubs-spark_2.12:2.3.21,org.apache.kafka:kafka-clients:3.1.0,com.google.guava:guava:31.1-jre,it.unimi.dsi:fastutil:8.1.1,org.mvel:mvel2:2.2.8.Final,com.fasterxml.jackson.module:jackson-module-scala_2.12:2.13.3,com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.12.6,com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.6,com.jasonclawson:jackson-dataformat-hocon:1.1.0,com.redislabs:spark-redis_2.12:3.1.0,org.apache.xbean:xbean-asm6-shaded:4.10,com.google.protobuf:protobuf-java:3.19.4,net.snowflake:snowflake-jdbc:3.13.18,net.snowflake:spark-snowflake_2.12:2.10.0-spark_3.2,org.apache.commons:commons-lang3:3.12.0,org.xerial:sqlite-jdbc:3.36.0.3,com.github.changvvb:jackson-module-caseclass_2.12:1.1.1,com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.11.1,org.eclipse.jetty:jetty-util:9.3.24.v20180605,commons-io:commons-io:2.6,org.apache.hadoop:hadoop-azure:2.7.4,com.microsoft.azure:azure-storage:8.6.4,com.linkedin.feathr:feathr_2.12:0.7.2" \
39+
--packages "org.apache.spark:spark-avro_2.12:3.3.0,com.microsoft.sqlserver:mssql-jdbc:10.2.0.jre8,com.microsoft.azure:spark-mssql-connector_2.12:1.2.0,org.apache.logging.log4j:log4j-core:2.17.2,com.typesafe:config:1.3.4,com.fasterxml.jackson.core:jackson-databind:2.12.6.1,org.apache.hadoop:hadoop-mapreduce-client-core:2.7.7,org.apache.hadoop:hadoop-common:2.7.7,org.apache.avro:avro:1.8.2,org.apache.xbean:xbean-asm6-shaded:4.10,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,com.microsoft.azure:azure-eventhubs-spark_2.12:2.3.21,org.apache.kafka:kafka-clients:3.1.0,com.google.guava:guava:31.1-jre,it.unimi.dsi:fastutil:8.1.1,org.mvel:mvel2:2.2.8.Final,com.fasterxml.jackson.module:jackson-module-scala_2.12:2.13.3,com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.12.6,com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.6,com.jasonclawson:jackson-dataformat-hocon:1.1.0,com.redislabs:spark-redis_2.12:3.1.0,org.apache.xbean:xbean-asm6-shaded:4.10,com.google.protobuf:protobuf-java:3.19.4,net.snowflake:snowflake-jdbc:3.13.18,net.snowflake:spark-snowflake_2.12:2.10.0-spark_3.2,org.apache.commons:commons-lang3:3.12.0,org.xerial:sqlite-jdbc:3.36.0.3,com.github.changvvb:jackson-module-caseclass_2.12:1.1.1,com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.11.1,org.eclipse.jetty:jetty-util:9.3.24.v20180605,commons-io:commons-io:2.6,org.apache.hadoop:hadoop-azure:2.7.4,com.microsoft.azure:azure-storage:8.6.4,com.linkedin.feathr:feathr_2.12:0.8.0" \
4040
--conf "spark.driver.extraClassPath=../target/scala-2.12/classes:jars/config-1.3.4.jar:jars/jackson-dataformat-hocon-1.1.0.jar:jars/jackson-module-caseclass_2.12-1.1.1.jar:jars/mvel2-2.2.8.Final.jar:jars/fastutil-8.1.1.jar" \
4141
--conf "spark.hadoop.fs.wasbs.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem" \
4242
--class com.linkedin.feathr.offline.job.FeatureJoinJob \

feathr_project/docs/conf.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,9 @@
2424
author = 'Feathr Community'
2525

2626
# The short X.Y version
27-
version = '0.7'
27+
version = '0.8'
2828
# The full version, including alpha/beta/rc tags
29-
release = '0.7'
29+
release = '0.8'
3030

3131

3232
# -- General configuration ---------------------------------------------------

feathr_project/feathr/constants.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
TYPEDEF_ARRAY_DERIVED_FEATURE=f"array<feathr_derived_feature_{REGISTRY_TYPEDEF_VERSION}>"
2929
TYPEDEF_ARRAY_ANCHOR_FEATURE=f"array<feathr_anchor_feature_{REGISTRY_TYPEDEF_VERSION}>"
3030

31-
FEATHR_MAVEN_ARTIFACT="com.linkedin.feathr:feathr_2.12:0.7.2"
31+
FEATHR_MAVEN_ARTIFACT="com.linkedin.feathr:feathr_2.12:0.8.0"
3232

3333
JOIN_CLASS_NAME="com.linkedin.feathr.offline.job.FeatureJoinJob"
3434
GEN_CLASS_NAME="com.linkedin.feathr.offline.job.FeatureGenJob"

feathr_project/setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
setup(
99
name='feathr',
10-
version='0.7.2',
10+
version='0.8.0',
1111
long_description=long_description,
1212
long_description_content_type="text/markdown",
1313
author_email="feathr-technical-discuss@lists.lfaidata.foundation",

feathr_project/test/test_user_workspace/feathr_config.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ spark_config:
8282
# Feathr Job configuration. Support local paths, path start with http(s)://, and paths start with abfs(s)://
8383
# this is the default location so end users don't have to compile the runtime again.
8484
# feathr_runtime_location: wasbs://public@azurefeathrstorage.blob.core.windows.net/feathr-assembly-LATEST.jar
85-
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
85+
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
8686
databricks:
8787
# workspace instance
8888
workspace_instance_url: 'https://adb-2474129336842816.16.azuredatabricks.net/'
@@ -93,7 +93,7 @@ spark_config:
9393
# Feathr Job location. Support local paths, path start with http(s)://, and paths start with dbfs:/
9494
work_dir: 'dbfs:/feathr_getting_started'
9595
# this is the default location so end users don't have to compile the runtime again.
96-
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
96+
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
9797

9898
online_store:
9999
redis:

feathr_project/test/test_user_workspace/feathr_config_purview.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ spark_config:
8282
# Feathr Job configuration. Support local paths, path start with http(s)://, and paths start with abfs(s)://
8383
# this is the default location so end users don't have to compile the runtime again.
8484
# feathr_runtime_location: wasbs://public@azurefeathrstorage.blob.core.windows.net/feathr-assembly-LATEST.jar
85-
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
85+
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
8686
databricks:
8787
# workspace instance
8888
workspace_instance_url: 'https://adb-2474129336842816.16.azuredatabricks.net/'
@@ -93,7 +93,7 @@ spark_config:
9393
# Feathr Job location. Support local paths, path start with http(s)://, and paths start with dbfs:/
9494
work_dir: 'dbfs:/feathr_getting_started'
9595
# this is the default location so end users don't have to compile the runtime again.
96-
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
96+
feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
9797

9898
online_store:
9999
redis:

0 commit comments

Comments
 (0)