package jsonrpclib.fs2interop.internals

import fs2.Chunk
import fs2.Stream
import java.nio.charset.Charset
import java.nio.charset.StandardCharsets
import jsonrpclib.Payload
import cats.MonadThrow
import cats.effect.std.Queue
import cats.effect.Concurrent
import cats.implicits._
import cats.effect.implicits._
import cats.effect.kernel.Resource

object LSP {

  /** Returns a function that enqueues payloads to be written. A background fiber drains the queue, frames each
    * payload with a Content-Length header, and feeds the resulting bytes through `writePipe`.
    */
  def writeSink[F[_]: Concurrent](
      writePipe: fs2.Pipe[F, Byte, Nothing],
      bufferSize: Int
  ): Resource[F, Payload => F[Unit]] =
    Queue.bounded[F, Payload](bufferSize).toResource.flatMap { queue =>
      val payloads = fs2.Stream.fromQueueUnterminated(queue, bufferSize)
      payloads
        .map(writeChunk)
        .flatMap(Stream.chunk(_))
        .through(writePipe) // route the framed bytes into the transport
        .compile
        .drain
        .background
        .void
        .as(queue.offer(_))
    }
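
  // Usage sketch (hypothetical, assuming cats.effect.IO and fs2.io's stdout pipe):
  //   LSP.writeSink[IO](fs2.io.stdout[IO], bufferSize = 512).use { send =>
  //     send(Payload("""{"jsonrpc":"2.0","method":"initialized"}""".getBytes))
  //   }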

  /** Split a stream of bytes into payloads by extracting each frame based on information contained in the headers.
    *
    * See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#contentPart
    */
  def readStream[F[_]: MonadThrow](bytes: Stream[F, Byte]): Stream[F, Payload] =
    bytes
      .scanChunks(ScanState.starting) { case (state, chunk) =>
        val (newState, maybeResult) = loop(state.concatChunk(chunk))
        (newState, Chunk(maybeResult))
      }
      .flatMap {
        case Right(acc)  => Stream.iterable(acc).map(c => Payload(c.toArray))
        case Left(error) => Stream.raiseError[F](error)
      }

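  // A complete frame on the wire (per the LSP base protocol) looks like:
  //   Content-Length: 17\r\n
  //   \r\n
  //   {"jsonrpc":"2.0"}
  // Frames may be split across reads: readStream buffers bytes until a full
  // frame is available, and writeChunk produces exactly this framing.
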
  private def writeChunk(payload: Payload): Chunk[Byte] = {
    val size = payload.array.length
    // Frame layout: "Content-Length: <n>\r\n\r\n" followed by the raw body bytes
    val header = s"Content-Length: $size\r\n\r\n"
    Chunk.array(header.getBytes(StandardCharsets.US_ASCII)) ++ Chunk.array(payload.array)
  }
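  // e.g. writeChunk(Payload("{}".getBytes)) yields the bytes of "Content-Length: 2\r\n\r\n{}"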

  private val returnByte = '\r'.toByte
  private val newlineByte = '\n'.toByte

  private final case class LSPHeaders(
      contentLength: Int,
      mimeType: String,
      charset: Charset
  )

  private final case class ParseError(message: String) extends Throwable {
    override def getMessage(): String = message
  }

  private def parseHeader(
      line: String,
      headers: LSPHeaders
  ): Either[ParseError, LSPHeaders] =
    line.trim() match {
      case s"Content-Length: ${integer(length)}" =>
        Right(headers.copy(contentLength = length))
      case s"Content-Type: ${mimeType}; charset=${charset}" =>
        Right(
          headers.copy(mimeType = mimeType, charset = Charset.forName(charset))
        )
      case _ => Left(ParseError(s"Couldn't parse header: $line"))
    }
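
  // e.g. parseHeader("Content-Length: 42", h) returns Right(h.copy(contentLength = 42));
  //      any line matching neither pattern yields a ParseError.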

  // Extractor enabling the `${integer(length)}` binding in the string-interpolator patterns above
  private object integer {
    def unapply(string: String): Option[Int] = string.toIntOption
  }

  private final case class ScanState(status: Status, currentHeaders: LSPHeaders, buffered: Chunk[Byte]) {
    def concatChunk(other: Chunk[Byte]) = copy(buffered = buffered ++ other)
  }

  private object ScanState {
    def readingHeader(storedChunk: Chunk[Byte]) = ScanState(
      Status.ReadingHeader,
      // contentLength = -1 marks a frame whose Content-Length header has not been parsed yet
      LSPHeaders(-1, "application/json", StandardCharsets.UTF_8),
      storedChunk
    )

    val starting: ScanState = readingHeader(Chunk.empty)
  }

  private sealed trait Status

  private object Status {
    case object ReadingHeader extends Status
    case object FinishedReadingHeader extends Status
    case object ReadingBody extends Status
  }

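  // State machine driving `loop`:
  //   ReadingHeader         --(header line parsed)-------> FinishedReadingHeader
  //   FinishedReadingHeader --(leading `\r\n` found)-----> ReadingBody
  //   FinishedReadingHeader --(otherwise)----------------> ReadingHeader (next header line)
  //   ReadingBody           --(contentLength bytes read)-> ReadingHeader (next frame)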
  private def loop(
      state: ScanState,
      acc: Seq[Chunk[Byte]] = Seq.empty
  ): (ScanState, Either[ParseError, Seq[Chunk[Byte]]]) =
    state match {
      case ScanState(Status.ReadingBody, headers, buffered) =>
        if (headers.contentLength <= buffered.size) {
          // We have a full payload to emit; anything after it belongs to the next frame
          val (payload, tail) = buffered.splitAt(headers.contentLength)
          val newState = ScanState.readingHeader(tail)
          loop(newState, acc.appended(payload))
        } else {
          // Not enough bytes yet: keep the state and wait for the next chunk
          (state, Right(acc))
        }
      case ScanState(Status.ReadingHeader, headers, buffered) =>
        // Accumulate bytes until a `\n` terminates the current header line
        val bb = java.nio.ByteBuffer.allocate(buffered.size)
        val iterator = buffered.iterator
        var continue = true
        var newState: ScanState = null
        var error: ParseError = null
        while (iterator.hasNext && continue) {
          val byte = iterator.next()
          if (byte == newlineByte) {
            // Only the bytes written so far form the line; the trailing `\r` is trimmed by parseHeader
            val line = new String(bb.array, 0, bb.position(), StandardCharsets.US_ASCII)
            parseHeader(line, headers) match {
              case Right(newHeader) =>
                // The bytes remaining after the newline are carried over to the next state
                newState = ScanState(Status.FinishedReadingHeader, newHeader, Chunk.iterator(iterator))
              case Left(e) =>
                error = e
            }
            continue = false
          } else {
            bb.put(byte)
          }
        }
        if (newState != null) {
          loop(newState, acc)
        } else if (error != null) {
          (state, Left(error))
        } else {
          // No complete header line yet: keep buffering
          (state, Right(acc))
        }

      case ScanState(Status.FinishedReadingHeader, headers, buffered) =>
        if (buffered.size >= 2) {
          if (buffered.startsWith(Seq(returnByte, newlineByte))) {
            // We have read two `\r\n` in a row: the headers are over, start scanning the body
            loop(ScanState(Status.ReadingBody, headers, buffered.drop(2)), acc)
          } else {
            // Another header line follows
            loop(ScanState(Status.ReadingHeader, headers, buffered), acc)
          }
        } else {
          (state, Right(acc))
        }
    }

}