Commit 4e450ad
chore: Bumping fastapi + starlette (#3938)
Parent: d3a2a45


42 files changed: +147 -178 lines

Makefile (+1 -1)

@@ -310,7 +310,7 @@ format-python:
 	cd ${ROOT_DIR}/sdk/python; python -m black --target-version py38 feast tests
 
 lint-python:
-	cd ${ROOT_DIR}/sdk/python; python -m mypy
+	cd ${ROOT_DIR}/sdk/python; python -m mypy --exclude=/tests/ --follow-imports=skip feast
 	cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ --check-only
 	cd ${ROOT_DIR}/sdk/python; python -m flake8 feast/ tests/
 	cd ${ROOT_DIR}/sdk/python; python -m black --check feast tests

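Most of the annotation churn in this commit follows from this Makefile change: `--exclude=/tests/` keeps mypy out of the test tree, `--follow-imports=skip` stops it from descending into imported modules, and pointing it at `feast` means the package itself is now checked in full. Recent mypy versions reject implicit Optional, so a parameter annotated `Dict[str, str]` with a `None` default is an error. A minimal sketch of the pattern being fixed throughout the diffs below (hypothetical function names, not from this commit):

from typing import Dict, Optional

# Flagged by mypy's no-implicit-optional rule, roughly:
#   error: Incompatible default for argument "tags"  [assignment]
def register_bad(tags: Dict[str, str] = None):  # intentionally wrong example
    ...

# The fix applied across this commit: spell the Optional out.
def register_good(tags: Optional[Dict[str, str]] = None) -> Dict[str, str]:
    return tags or {}
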
sdk/python/feast/data_source.py (+9 -10)

@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import enum
 import warnings
 from abc import ABC, abstractmethod
@@ -485,12 +484,12 @@ def to_proto(self) -> DataSourceProto:
         return data_source_proto
 
     def validate(self, config: RepoConfig):
-        pass
+        raise NotImplementedError
 
     def get_table_column_names_and_types(
         self, config: RepoConfig
     ) -> Iterable[Tuple[str, str]]:
-        pass
+        raise NotImplementedError
 
     @staticmethod
     def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]:
@@ -534,12 +533,12 @@ def __init__(
         self.schema = schema
 
     def validate(self, config: RepoConfig):
-        pass
+        raise NotImplementedError
 
     def get_table_column_names_and_types(
         self, config: RepoConfig
     ) -> Iterable[Tuple[str, str]]:
-        pass
+        raise NotImplementedError
 
     def __eq__(self, other):
         if not isinstance(other, RequestSource):
@@ -610,12 +609,12 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]:
 @typechecked
 class KinesisSource(DataSource):
     def validate(self, config: RepoConfig):
-        pass
+        raise NotImplementedError
 
     def get_table_column_names_and_types(
         self, config: RepoConfig
     ) -> Iterable[Tuple[str, str]]:
-        pass
+        raise NotImplementedError
 
     @staticmethod
     def from_proto(data_source: DataSourceProto):
@@ -639,7 +638,7 @@ def from_proto(data_source: DataSourceProto):
 
     @staticmethod
     def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]:
-        pass
+        raise NotImplementedError
 
     def get_table_query_string(self) -> str:
         raise NotImplementedError
@@ -772,12 +771,12 @@ def __hash__(self):
         return super().__hash__()
 
     def validate(self, config: RepoConfig):
-        pass
+        raise NotImplementedError
 
     def get_table_column_names_and_types(
         self, config: RepoConfig
     ) -> Iterable[Tuple[str, str]]:
-        pass
+        raise NotImplementedError
 
     @staticmethod
     def from_proto(data_source: DataSourceProto):

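A note on the recurring `pass` to `raise NotImplementedError` swap above: a `pass` body silently returns `None`, which both violates annotated return types such as `Iterable[Tuple[str, str]]` and hides missing overrides until much later. A body that always raises type-checks against any declared return type and fails loudly at the call site. A minimal sketch (hypothetical class name, not from the diff):

from typing import Iterable, Tuple

class SketchSource:
    # With `pass`, calling this would return None and mypy would flag the
    # missing return value; raising instead satisfies the checker and
    # surfaces unimplemented subclass methods at runtime.
    def get_table_column_names_and_types(self) -> Iterable[Tuple[str, str]]:
        raise NotImplementedError
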
sdk/python/feast/feature_service.py (+1 -1)

@@ -56,7 +56,7 @@ def __init__(
         *,
         name: str,
         features: List[Union[FeatureView, OnDemandFeatureView]],
-        tags: Dict[str, str] = None,
+        tags: Optional[Dict[str, str]] = None,
         description: str = "",
         owner: str = "",
         logging_config: Optional[LoggingConfig] = None,

sdk/python/feast/feature_view.py (+1 -1)

@@ -101,7 +101,7 @@ def __init__(
         name: str,
         source: DataSource,
         schema: Optional[List[Field]] = None,
-        entities: List[Entity] = None,
+        entities: Optional[List[Entity]] = None,
         ttl: Optional[timedelta] = timedelta(days=0),
         online: bool = True,
         description: str = "",

sdk/python/feast/importer.py (+2 -1)

@@ -1,4 +1,5 @@
 import importlib
+from typing import Optional
 
 from feast.errors import (
     FeastClassImportError,
@@ -7,7 +8,7 @@
 )
 
 
-def import_class(module_name: str, class_name: str, class_type: str = None):
+def import_class(module_name: str, class_name: str, class_type: Optional[str] = None):
     """
     Dynamically loads and returns a class from a module.
 

sdk/python/feast/infra/contrib/spark_kafka_processor.py (+9 -2)

@@ -5,6 +5,7 @@
 from pyspark.sql import DataFrame, SparkSession
 from pyspark.sql.avro.functions import from_avro
 from pyspark.sql.functions import col, from_json
+from pyspark.sql.streaming import StreamingQuery
 
 from feast.data_format import AvroFormat, JsonFormat
 from feast.data_source import KafkaSource, PushMode
@@ -63,7 +64,13 @@ def __init__(
         self.join_keys = [fs.get_entity(entity).join_key for entity in sfv.entities]
         super().__init__(fs=fs, sfv=sfv, data_source=sfv.stream_source)
 
-    def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None:
+        # Type hinting for data_source type.
+        # data_source type has been checked to be an instance of KafkaSource.
+        self.data_source: KafkaSource = self.data_source  # type: ignore
+
+    def ingest_stream_feature_view(
+        self, to: PushMode = PushMode.ONLINE
+    ) -> StreamingQuery:
         ingested_stream_df = self._ingest_stream_data()
         transformed_df = self._construct_transformation_plan(ingested_stream_df)
         online_store_query = self._write_stream_data(transformed_df, to)
@@ -122,7 +129,7 @@ def _ingest_stream_data(self) -> StreamTable:
     def _construct_transformation_plan(self, df: StreamTable) -> StreamTable:
         return self.sfv.udf.__call__(df) if self.sfv.udf else df
 
-    def _write_stream_data(self, df: StreamTable, to: PushMode):
+    def _write_stream_data(self, df: StreamTable, to: PushMode) -> StreamingQuery:
         # Validation occurs at the fs.write_to_online_store() phase against the stream feature view schema.
         def batch_write(row: DataFrame, batch_id: int):
             rows: pd.DataFrame = row.toPandas()

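Two things happen in this file: the ingest methods now return the `StreamingQuery` handle instead of `None` (so a caller can manage the job, e.g. `query.awaitTermination()` or `query.stop()`), and the constructor re-declares `self.data_source` with the narrower `KafkaSource` type. The latter is a common narrowing idiom for type checkers; a minimal self-contained sketch with stub classes (not the Feast ones):

class DataSource: ...

class KafkaSource(DataSource):
    kafka_bootstrap_servers = "localhost:9092"

class Processor:
    def __init__(self, data_source: DataSource):
        self.data_source = data_source

class KafkaProcessor(Processor):
    def __init__(self, data_source: KafkaSource):
        super().__init__(data_source)
        # Re-declare the inherited attribute with the narrower type; the
        # runtime value is unchanged, and the ignore silences mypy's
        # complaint about redefining the attribute's declared type.
        self.data_source: KafkaSource = self.data_source  # type: ignore
        _ = self.data_source.kafka_bootstrap_servers  # now type-checks
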
sdk/python/feast/infra/contrib/stream_processor.py (+6 -5)

@@ -3,6 +3,7 @@
 from typing import TYPE_CHECKING, Optional
 
 from pyspark.sql import DataFrame
+from typing_extensions import TypeAlias
 
 from feast.data_source import DataSource, PushMode
 from feast.importer import import_class
@@ -17,7 +18,7 @@
 }
 
 # TODO: support more types other than just Spark.
-StreamTable = DataFrame
+StreamTable: TypeAlias = DataFrame
 
 
 class ProcessorConfig(FeastConfigBaseModel):
@@ -54,28 +55,28 @@ def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None:
         Ingests data from the stream source attached to the stream feature view; transforms the data
         and then persists it to the online store and/or offline store, depending on the 'to' parameter.
         """
-        pass
+        raise NotImplementedError
 
     def _ingest_stream_data(self) -> StreamTable:
         """
         Ingests data into a StreamTable.
         """
-        pass
+        raise NotImplementedError
 
     def _construct_transformation_plan(self, table: StreamTable) -> StreamTable:
         """
         Applies transformations on top of StreamTable object. Since stream engines use lazy
         evaluation, the StreamTable will not be materialized until it is actually evaluated.
         For example: df.collect() in spark or tbl.execute() in Flink.
         """
-        pass
+        raise NotImplementedError
 
     def _write_stream_data(self, table: StreamTable, to: PushMode) -> None:
         """
         Launches a job to persist stream data to the online store and/or offline store, depending
         on the 'to' parameter, and returns a handle for the job.
         """
-        pass
+        raise NotImplementedError
 
 
 def get_stream_processor_object(

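The `TypeAlias` annotation here is PEP 613: a bare `StreamTable = DataFrame` is ambiguous to a type checker (type alias or ordinary module-level variable?), while the annotated form is unambiguously an alias, importable from `typing_extensions` on Pythons older than 3.10. A short sketch of the idiom:

from pyspark.sql import DataFrame
from typing_extensions import TypeAlias  # typing.TypeAlias on Python 3.10+

# Explicitly an alias (PEP 613), so it can be used in annotations anywhere.
StreamTable: TypeAlias = DataFrame

def passthrough(table: StreamTable) -> StreamTable:
    return table
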
sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py (+3 -3)

@@ -297,9 +297,9 @@ class SavedDatasetAthenaStorage(SavedDatasetStorage):
     def __init__(
         self,
         table_ref: str,
-        query: str = None,
-        database: str = None,
-        data_source: str = None,
+        query: Optional[str] = None,
+        database: Optional[str] = None,
+        data_source: Optional[str] = None,
     ):
         self.athena_options = AthenaOptions(
             table=table_ref, query=query, database=database, data_source=data_source

sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py (+1 -1)

@@ -51,7 +51,7 @@ def create_data_source(
         suffix: Optional[str] = None,
         timestamp_field="ts",
         created_timestamp_column="created_ts",
-        field_mapping: Dict[str, str] = None,
+        field_mapping: Optional[Dict[str, str]] = None,
     ) -> DataSource:
 
         table_name = destination_name

sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py (+4 -4)

@@ -1,4 +1,4 @@
-from typing import Dict, List
+from typing import Dict, List, Optional
 
 import pandas as pd
 import pytest
@@ -66,7 +66,7 @@ def create_data_source(
         destination_name: str,
         timestamp_field="ts",
         created_timestamp_column="created_ts",
-        field_mapping: Dict[str, str] = None,
+        field_mapping: Optional[Dict[str, str]] = None,
         **kwargs,
     ) -> DataSource:
         # Make sure the field mapping is correct and convert the datetime datasources.
@@ -99,10 +99,10 @@ def create_data_source(
         )
 
     def create_saved_dataset_destination(self) -> SavedDatasetStorage:
-        pass
+        raise NotImplementedError
 
     def get_prefixed_table_name(self, destination_name: str) -> str:
         return f"{self.project_name}_{destination_name}"
 
     def teardown(self):
-        pass
+        raise NotImplementedError

sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py (+1 -1)

@@ -85,7 +85,7 @@ def create_data_source(
         suffix: Optional[str] = None,
         timestamp_field="ts",
         created_timestamp_column="created_ts",
-        field_mapping: Dict[str, str] = None,
+        field_mapping: Optional[Dict[str, str]] = None,
     ) -> DataSource:
         destination_name = self.get_prefixed_table_name(destination_name)
 

sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py (+2 -2)

@@ -2,7 +2,7 @@
 import shutil
 import tempfile
 import uuid
-from typing import Dict, List
+from typing import Dict, List, Optional
 
 import pandas as pd
 from pyspark import SparkConf
@@ -70,7 +70,7 @@ def create_data_source(
         destination_name: str,
         timestamp_field="ts",
         created_timestamp_column="created_ts",
-        field_mapping: Dict[str, str] = None,
+        field_mapping: Optional[Dict[str, str]] = None,
         **kwargs,
     ) -> DataSource:
         if timestamp_field in df:

sdk/python/feast/infra/offline_stores/file_source.py (+1 -1)

@@ -183,7 +183,7 @@ def create_filesystem_and_path(
         return None, path
 
     def get_table_query_string(self) -> str:
-        pass
+        raise NotImplementedError
 
 
 class FileOptions:

0 commit comments

Comments
 (0)