Skip to content

Commit 64194f0

Browse files
committed
Merge branch 'main' into flow-processor
2 parents 1d91267 + 9aaabbb commit 64194f0

File tree

15 files changed

+41
-33
lines changed

15 files changed

+41
-33
lines changed

.git-blame-ignore-revs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,3 +21,6 @@ e5525d3f0da44052fdcfbe844993260bdc044270
2121

2222
# Scala Steward: Reformat with scalafmt 3.8.2
2323
a0a37ece16ee55056270b4d9ba5c1505ead8af17
24+
25+
# Scala Steward: Reformat with scalafmt 3.8.6
26+
52e52b013db077ecb5b5a8f5b6e6113f912556d8

.scalafmt.conf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
version = "3.8.2"
1+
version = "3.9.2"
22

33
style = default
44

build.sbt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,10 @@ ThisBuild / organization := "co.fs2"
88
ThisBuild / organizationName := "Functional Streams for Scala"
99
ThisBuild / startYear := Some(2013)
1010

11-
val Scala213 = "2.13.15"
11+
val Scala213 = "2.13.16"
1212

1313
ThisBuild / scalaVersion := Scala213
14-
ThisBuild / crossScalaVersions := Seq("2.12.20", Scala213, "3.3.4")
14+
ThisBuild / crossScalaVersions := Seq("2.12.20", Scala213, "3.3.5")
1515
ThisBuild / tlVersionIntroduced := Map("3" -> "3.0.3")
1616

1717
ThisBuild / githubWorkflowOSes := Seq("ubuntu-latest")

core/shared/src/main/scala-3/fs2/ChunkPlatform.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,8 @@ private[fs2] trait ChunkCompanionPlatform extends ChunkCompanion213And3Compat {
8686
private[fs2] val ct: ClassTag[O]
8787
) extends Chunk[O] {
8888
require(
89-
offset >= 0 && offset <= values.size && length >= 0 && length <= values.size && offset + length <= values.size
89+
offset >= 0 && offset <= values.size && length >= 0 && length <= values.size && offset + length <= values.size,
90+
"IArraySlice out of bounds"
9091
)
9192

9293
def size = length

core/shared/src/main/scala/fs2/Chunk.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -863,7 +863,8 @@ object Chunk
863863
// ClassTag(values.getClass.getComponentType) -- we only keep it for bincompat
864864

865865
require(
866-
offset >= 0 && offset <= values.size && length >= 0 && length <= values.size && offset + length <= values.size
866+
offset >= 0 && offset <= values.size && length >= 0 && length <= values.size && offset + length <= values.size,
867+
"ArraySlice out of bounds"
867868
)
868869

869870
override protected def thisClassTag: ClassTag[Any] = ct.asInstanceOf[ClassTag[Any]]

core/shared/src/main/scala/fs2/concurrent/Signal.scala

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -481,8 +481,11 @@ object SignallingMapRef {
481481
.map { case (state, ids) =>
482482
def newId = ids.getAndUpdate(_ + 1)
483483

484-
def updateAndNotify[U](state: State, k: K, f: Option[V] => (Option[V], U))
485-
: (State, F[U]) = {
484+
def updateAndNotify[U](
485+
state: State,
486+
k: K,
487+
f: Option[V] => (Option[V], U)
488+
): (State, F[U]) = {
486489

487490
val keyState = state.keys.get(k)
488491

core/shared/src/test/scala/fs2/StreamPerformanceSuite.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -198,9 +198,8 @@ class StreamPerformanceSuite extends Fs2Suite {
198198
val s: Stream[SyncIO, Int] =
199199
List
200200
.fill(N)(bracketed)
201-
.foldLeft(Stream.raiseError[SyncIO](new Err): Stream[SyncIO, Int]) {
202-
(acc, hd) =>
203-
acc.handleErrorWith(_ => hd)
201+
.foldLeft(Stream.raiseError[SyncIO](new Err): Stream[SyncIO, Int]) { (acc, hd) =>
202+
acc.handleErrorWith(_ => hd)
204203
}
205204
s.compile.toList.attempt
206205
.flatMap(_ => (ok.get, open.get).tupled)

core/shared/src/test/scala/fs2/StreamZipSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -177,9 +177,8 @@ class StreamZipSuite extends Fs2Suite {
177177
Logger[IO]
178178
.flatMap { logger =>
179179
def s(tag: String) =
180-
logger.logLifecycle(tag) >> {
180+
logger.logLifecycle(tag) >>
181181
logger.logLifecycle(s"$tag - 1") ++ logger.logLifecycle(s"$tag - 2")
182-
}
183182

184183
s("a").zip(s("b")).compile.drain >>
185184
logger.get.assertEquals(

flake.lock

Lines changed: 9 additions & 9 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integration/src/test/scala/fs2/MemoryLeakSpec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ class MemoryLeakSpec extends FunSuite {
4646
warmupIterations: Int = 3,
4747
samplePeriod: FiniteDuration = 1.seconds,
4848
monitorPeriod: FiniteDuration = 10.seconds,
49-
limitTotalBytesIncreasePerSecond: Long = 700000,
49+
limitTotalBytesIncreasePerSecond: Long = 1400000,
5050
limitConsecutiveIncreases: Int = 10
5151
)
5252

io/jvm/src/test/scala/fs2/io/IoPlatformSuite.scala

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -64,12 +64,11 @@ class IoPlatformSuite extends Fs2Suite {
6464
(bs1.length != (o1 + l1)) &&
6565
// we expect that next slice will wrap same buffer
6666
((bs2 eq bs1) && (o2 == o1 + l1))
67-
} || {
68-
// if first slice buffer is 'full'
69-
(bs2.length == (o1 + l1)) &&
70-
// we expect new buffer allocated for next slice
71-
((bs2 ne bs1) && (o2 == 0))
72-
}
67+
} ||
68+
// if first slice buffer is 'full'
69+
(bs2.length == (o1 + l1)) &&
70+
// we expect new buffer allocated for next slice
71+
((bs2 ne bs1) && (o2 == 0))
7372
case _ => false // unexpected chunk subtype
7473
}
7574
}

project/build.properties

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
sbt.version=1.10.7
1+
sbt.version=1.10.10

project/plugins.sbt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
1-
val sbtTypelevelVersion = "0.7.5"
1+
val sbtTypelevelVersion = "0.7.7"
22
addSbtPlugin("org.typelevel" % "sbt-typelevel" % sbtTypelevelVersion)
33
addSbtPlugin("org.typelevel" % "sbt-typelevel-site" % sbtTypelevelVersion)
4-
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.17.0")
4+
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.18.2")
55
addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.17")
66
addSbtPlugin("com.armanbilge" % "sbt-scala-native-config-brew-github-actions" % "0.3.0")
77
addSbtPlugin("io.github.sbt-doctest" % "sbt-doctest" % "0.11.1")

protocols/shared/src/main/scala-2/fs2/protocols/pcapng/BlockCodec.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,8 +44,9 @@ object BlockCodec {
4444
("Block Total Length" | constant(length.bv) )}
4545
// format: on
4646

47-
def unknownByteOrder[L <: HList, LB <: HList](hexConstant: ByteVector)(f: Length => Codec[L])(
48-
implicit
47+
def unknownByteOrder[L <: HList, LB <: HList](
48+
hexConstant: ByteVector
49+
)(f: Length => Codec[L])(implicit
4950
prepend: Prepend.Aux[L, Unit :: HNil, LB],
5051
init: Init.Aux[LB, L],
5152
last: Last.Aux[LB, Unit]

reactive-streams/src/main/scala/fs2/interop/reactivestreams/package.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ package object reactivestreams {
6060
* A high number can be useful if the publisher is triggering from IO, like requesting elements from a database.
6161
* The publisher can use this `bufferSize` to query elements in batch.
6262
* A high number will also lead to more elements in memory.
63+
* The stream will not emit a new element until either the `Chunk` is filled or the publisher finishes.
6364
*/
6465
def fromPublisher[F[_]: Async, A](p: Publisher[A], bufferSize: Int): Stream[F, A] =
6566
Stream
@@ -87,6 +88,7 @@ package object reactivestreams {
8788
* A high number can be useful if the publisher is triggering from IO, like requesting elements from a database.
8889
* The publisher can use this `bufferSize` to query elements in batch.
8990
* A high number will also lead to more elements in memory.
91+
* The stream will not emit a new element until either the `Chunk` is filled or the publisher finishes.
9092
*/
9193
def toStreamBuffered[F[_]: Async](bufferSize: Int): Stream[F, A] =
9294
fromPublisher(publisher, bufferSize)

0 commit comments

Comments
 (0)