Skip to content

Commit 56da7f1

Browse files
pre-commit-ci-lite[bot] authored and gz committed
[pre-commit.ci lite] apply automatic fixes
1 parent 0cb0497 commit 56da7f1

File tree

103 files changed

+1183
-1191
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

103 files changed

+1183
-1191
lines changed

Earthfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -497,7 +497,7 @@ integration-tests:
497497

498498
benchmark:
499499
FROM +build-manager
500-
RUN apt-get install --yes csvkit
500+
RUN apt-get install --yes csvkit
501501
COPY demo/project_demo12-HopsworksTikTokRecSys/tiktok-gen demo/project_demo12-HopsworksTikTokRecSys/tiktok-gen
502502
COPY scripts/bench.bash scripts/bench.bash
503503
COPY benchmark/feldera-sql/run.py benchmark/feldera-sql/run.py

benchmark/feldera-sql/benchmarks/tiktok/queries/q1.sql

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
CREATE VIEW q0 AS
2-
SELECT
2+
SELECT
33
interaction_id,
44
count(*),
55
avg(watch_time)
@@ -15,7 +15,7 @@ GROUP BY
1515
interaction_id;
1616

1717
CREATE VIEW q1 AS
18-
SELECT
18+
SELECT
1919
interaction_id,
2020
count(*),
2121
avg(watch_time)

benchmark/flink-kafka/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ docker compose up -d -p nexmark
8888
This query will continue running in the background even after it
8989
"finishes". It will consume your disk space at an incredible rate
9090
(over 1 TB in 30 minutes).
91-
91+
9292
You might want to do this from a separate terminal, because the
9393
`docker compose` from the previous step will spew tons of
9494
distractions (unless you added `-d` above).

benchmark/flink/refresh-flink-benchmark.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ cd "$(dirname "$0")"
55
# Run Flink benchmark
66
flink_dir=../../gh-pages/flink/
77
mkdir $flink_dir
8-
cd ../..
8+
cd ../..
99
earthly --verbose -P +flink-benchmark
1010
cd benchmark/flink
1111
mv ../../flink_results_*.csv $flink_dir

crates/adapters/src/format/avro/serializer.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -813,7 +813,7 @@ mod test {
813813
b["b"]: Option<bool>
814814
});
815815

816-
const SCHEMA1: &'static str = r#"{
816+
const SCHEMA1: &str = r#"{
817817
"type": "record",
818818
"name": "Test1",
819819
"fields": [
@@ -840,7 +840,7 @@ mod test {
840840
dec3["dec3"]: RustDecimal
841841
});
842842

843-
const SCHEMA_NUMERIC: &'static str = r#"{
843+
const SCHEMA_NUMERIC: &str = r#"{
844844
"type": "record",
845845
"name": "Numeric",
846846
"fields": [
@@ -853,7 +853,7 @@ mod test {
853853
}"#;
854854

855855
// Serializer should be able to serialize non-nullable fields into nullable schema.
856-
const SCHEMA_NUMERIC_OPTIONAL: &'static str = r#"{
856+
const SCHEMA_NUMERIC_OPTIONAL: &str = r#"{
857857
"type": "record",
858858
"name": "Numeric",
859859
"fields": [

crates/adapters/src/format/avro/test.rs

Lines changed: 13 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ fn debezium_avro_schema(value_schema: &str, value_type_name: &str) -> AvroSchema
182182
],
183183
"connect.version": 1,
184184
"connect.name": "test_namespace.Envelope"
185-
}"#.replace("VALUE_SCHEMA", &value_schema).replace("VALUE_TYPE", value_type_name);
185+
}"#.replace("VALUE_SCHEMA", value_schema).replace("VALUE_TYPE", value_type_name);
186186

187187
println!("Debezium Avro schema: {schema_str}");
188188

@@ -202,11 +202,11 @@ where
202202
// 5-byte header
203203
let mut buffer = vec![0; 5];
204204
let refs = HashMap::new();
205-
let serializer = AvroSchemaSerializer::new(&schema, &refs, false);
205+
let serializer = AvroSchemaSerializer::new(schema, &refs, false);
206206
let val = x
207207
.serialize_with_context(serializer, &avro_ser_config())
208208
.unwrap();
209-
let mut avro_record = to_avro_datum(&schema, val).unwrap();
209+
let mut avro_record = to_avro_datum(schema, val).unwrap();
210210
buffer.append(&mut avro_record);
211211
buffer
212212
}
@@ -301,8 +301,7 @@ where
301301

302302
let expected_output = data
303303
.iter()
304-
.map(|x| vec![MockUpdate::Delete(x.clone()), MockUpdate::Insert(x.clone())])
305-
.flatten()
304+
.flat_map(|x| vec![MockUpdate::Delete(x.clone()), MockUpdate::Insert(x.clone())])
306305
.collect::<Vec<_>>();
307306

308307
TestCase {
@@ -341,7 +340,7 @@ fn test_raw_avro_parser() {
341340
let test_case = gen_raw_parser_test(
342341
&TestStruct2::data(),
343342
&TestStruct2::relation_schema(),
344-
&TestStruct2::avro_schema(),
343+
TestStruct2::avro_schema(),
345344
);
346345

347346
run_parser_test(vec![test_case])
@@ -352,7 +351,7 @@ fn test_debezium_avro_parser() {
352351
let test_case = gen_debezium_parser_test(
353352
&TestStruct2::data(),
354353
&TestStruct2::relation_schema(),
355-
&TestStruct2::avro_schema(),
354+
TestStruct2::avro_schema(),
356355
"TestStruct2",
357356
);
358357

@@ -393,7 +392,7 @@ fn test_extra_columns() {
393392
]
394393
}"#;
395394

396-
let schema = AvroSchema::parse_str(&schema_str).unwrap();
395+
let schema = AvroSchema::parse_str(schema_str).unwrap();
397396
let vals = TestStruct2::data();
398397
let input_batches = vals
399398
.iter()
@@ -460,8 +459,8 @@ fn test_non_null_to_nullable() {
460459
]
461460
}"#;
462461

463-
let schema = AvroSchema::parse_str(&schema_str).unwrap();
464-
let vals = vec![TestStruct2 {
462+
let schema = AvroSchema::parse_str(schema_str).unwrap();
463+
let vals = [TestStruct2 {
465464
field: 1,
466465
field_0: Some("test".to_string()),
467466
..Default::default()
@@ -526,7 +525,7 @@ fn test_ms_time() {
526525
]
527526
}"#;
528527

529-
let schema = AvroSchema::parse_str(&schema_str).unwrap();
528+
let schema = AvroSchema::parse_str(schema_str).unwrap();
530529
let vals = TestStruct2::data();
531530
let input_batches = vals
532531
.iter()
@@ -558,15 +557,15 @@ proptest! {
558557
#[test]
559558
fn proptest_raw_avro_parser(data in proptest::collection::vec(any::<TestStruct2>(), 0..=10000))
560559
{
561-
let test_case = gen_raw_parser_test(&data, &TestStruct2::relation_schema(), &TestStruct2::avro_schema());
560+
let test_case = gen_raw_parser_test(&data, &TestStruct2::relation_schema(), TestStruct2::avro_schema());
562561

563562
run_parser_test(vec![test_case])
564563
}
565564

566565
#[test]
567566
fn proptest_debezium_avro_parser(data in proptest::collection::vec(any::<TestStruct2>(), 0..=10000))
568567
{
569-
let test_case = gen_debezium_parser_test(&data, &TestStruct2::relation_schema(), &TestStruct2::avro_schema(), "TestStruct2");
568+
let test_case = gen_debezium_parser_test(&data, &TestStruct2::relation_schema(), TestStruct2::avro_schema(), "TestStruct2");
570569

571570
run_parser_test(vec![test_case])
572571
}
@@ -672,14 +671,13 @@ fn test_confluent_avro_output<K, V, KF>(
672671
let (expected_inserts, expected_deletes): (Vec<_>, Vec<_>) = batches
673672
.concat()
674673
.into_iter()
675-
.map(|Tup2(v, w)| {
674+
.flat_map(|Tup2(v, w)| {
676675
if w > 0 {
677676
repeat(Tup2(v.clone(), 1)).take(w as usize)
678677
} else {
679678
repeat(Tup2(v.clone(), -1)).take(-w as usize)
680679
}
681680
})
682-
.flatten()
683681
.partition(|Tup2(_, w)| *w > 0);
684682
let expected_deletes = expected_deletes
685683
.into_iter()

crates/adapters/src/format/json/output.rs

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -634,8 +634,7 @@ mod test {
634634
.lock()
635635
.unwrap()
636636
.iter()
637-
.map(|(_k, v)| v.clone())
638-
.flatten()
637+
.filter_map(|(_k, v)| v.clone())
639638
.flatten()
640639
.collect::<Vec<_>>()
641640
)
@@ -646,8 +645,7 @@ mod test {
646645
.lock()
647646
.unwrap()
648647
.iter()
649-
.map(|(_k, v)| v.clone())
650-
.flatten()
648+
.filter_map(|(_k, v)| v.clone())
651649
.flatten()
652650
.collect::<Vec<_>>();
653651
let deserializer = serde_json::Deserializer::from_slice(&consumer_data);

crates/adapters/src/format/parquet/test.rs

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ pub fn load_parquet_file<T: for<'de> DeserializeWithContext<'de, SqlSerdeConfig>
3333
let file = File::open(path).unwrap();
3434

3535
SerializedFileReader::new(file)
36-
.expect(&format!("error opening parquet file {path:?}"))
36+
.unwrap_or_else(|_| panic!("error opening parquet file {path:?}"))
3737
.into_iter()
3838
.map(|row| {
3939
let row = row.unwrap().to_json_value();
@@ -171,8 +171,7 @@ fn parquet_output() {
171171
.lock()
172172
.unwrap()
173173
.iter()
174-
.map(|(_k, v)| v.clone())
175-
.flatten()
174+
.filter_map(|(_k, v)| v.clone())
176175
.flatten()
177176
.collect(),
178177
);
@@ -181,8 +180,7 @@ fn parquet_output() {
181180
.lock()
182181
.unwrap()
183182
.iter()
184-
.map(|(_k, v)| v.clone())
185-
.flatten()
183+
.filter_map(|(_k, v)| v.clone())
186184
.flatten()
187185
.collect::<Vec<_>>();
188186

crates/adapters/src/integrated/delta_table/test.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -395,7 +395,7 @@ outputs:
395395

396396
if verify {
397397
let parquet_files =
398-
list_files_recursive(&Path::new(table_uri), OsStr::from_bytes(b"parquet")).unwrap();
398+
list_files_recursive(Path::new(table_uri), OsStr::from_bytes(b"parquet")).unwrap();
399399

400400
// // Uncomment to inspect the input JSON file.
401401
// std::mem::forget(input_file);
@@ -508,7 +508,7 @@ async fn test_follow(
508508

509509
// Connect to `output_table_uri`.
510510
let mut output_table = Arc::new(
511-
DeltaTableBuilder::from_uri(&output_table_uri)
511+
DeltaTableBuilder::from_uri(output_table_uri)
512512
.with_storage_options(storage_options.clone())
513513
.load()
514514
.await

crates/adapters/src/static_compile/deinput.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1550,7 +1550,7 @@ mod test {
15501550
fn test_scalar() {
15511551
let (mut dbsp, input_handle, output_handle) = descalar_test_circuit(NUM_WORKERS);
15521552

1553-
let inputs = vec![
1553+
let inputs = [
15541554
TestStruct {
15551555
id: 1,
15561556
s: "foo".to_string(),

0 commit comments

Comments
 (0)