@@ -268,6 +268,114 @@ To start the feature server in TLS mode, you need to provide the private and pub
268268feast serve --key /path/to/key.pem --cert /path/to/cert.pem
269269```
270270
271+ # Static Artifacts Loading
272+
273+ ## Overview
274+
275+ Static artifacts loading allows you to load models, lookup tables, and other static resources once during feature server startup instead of loading them on each request. These artifacts are cached in memory and accessible to on-demand feature views.
276+
277+ ## Usage
278+
279+ ### Create Static Artifacts Module
280+
281+ Create a `static_artifacts.py` file in your feature repository:
282+
283+ ```python
284+ # static_artifacts.py
285+ from fastapi import FastAPI
286+ from transformers import pipeline
287+
288+ def load_sentiment_model():
289+     """Load sentiment analysis model."""
290+     return pipeline(
291+         "sentiment-analysis",
292+         model="cardiffnlp/twitter-roberta-base-sentiment-latest",
293+         device="cpu"
294+     )
295+
296+ def load_lookup_tables():
297+     """Load static lookup tables."""
298+     return {
299+         "sentiment_labels": {"LABEL_0": "negative", "LABEL_1": "neutral", "LABEL_2": "positive"},
300+         "domain_categories": {"twitter.com": "social", "news.com": "news"},
301+     }
302+
303+ def load_artifacts(app: FastAPI):
304+     """Load static artifacts into app.state."""
305+     app.state.sentiment_model = load_sentiment_model()
306+     app.state.lookup_tables = load_lookup_tables()
307+
308+     # Update global references for access from feature views
309+     import example_repo
310+     example_repo._sentiment_model = app.state.sentiment_model
311+     example_repo._lookup_tables = app.state.lookup_tables
312+ ```
313+
314+ ### Use Artifacts in Feature Views
315+
316+ Access pre-loaded artifacts in your on-demand feature views:
317+
318+ ```python
319+ # example_repo.py
320+ import pandas as pd
321+ from feast.on_demand_feature_view import on_demand_feature_view
322+
323+ # Global references for static artifacts
324+ _sentiment_model = None
325+ _lookup_tables: dict = {}
326+
327+ @on_demand_feature_view(
328+     sources=[text_input_request],
329+     schema=[
330+         Field(name="predicted_sentiment", dtype=String),
331+         Field(name="sentiment_confidence", dtype=Float32),
332+     ],
333+ )
334+ def sentiment_prediction(inputs: pd.DataFrame) -> pd.DataFrame:
335+     """Sentiment prediction using pre-loaded artifacts."""
336+     global _sentiment_model, _lookup_tables
337+
338+     results = []
339+     for text in inputs["input_text"]:
340+         predictions = _sentiment_model(text)
341+         label_map = _lookup_tables["sentiment_labels"]
342+         best_pred = max(predictions, key=lambda x: x["score"])
343+         predicted_sentiment = label_map[best_pred["label"]]
344+         confidence = best_pred["score"]
345+
346+         results.append({
347+             "predicted_sentiment": predicted_sentiment,
348+             "sentiment_confidence": confidence,
349+         })
350+
351+     return pd.DataFrame(results)
352+ ```
353+
354+ ### Start Feature Server
355+
356+ ```bash
357+ feast serve
358+ ```
359+
360+ The server will automatically load static artifacts during startup.
361+
362+ ## Supported Artifact Types
363+
364+ - Small to medium ML models
365+ - Lookup tables and reference data
366+ - Configuration parameters
367+ - Pre-computed embeddings
368+
369+ ## Example Template
370+
371+ The PyTorch NLP template demonstrates static artifacts loading:
372+
373+ ```bash
374+ feast init my-nlp-project -t pytorch_nlp
375+ cd my-nlp-project/feature_repo
376+ feast serve
377+ ```
378+
271379# Online Feature Server Permissions and Access Control
272380
273381## API Endpoints and Permissions
0 commit comments