BenchmarkClient.swift

/*
 * Copyright 2024, gRPC Authors All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import Foundation
import GRPCCore
import NIOConcurrencyHelpers
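
/// Drives a configurable number of benchmark RPCs against a `Grpc_Testing_BenchmarkService`
/// server, recording the latency and outcome of each call.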
@available(macOS 13.0, iOS 16.0, watchOS 9.0, tvOS 16.0, *)
struct BenchmarkClient {
  private var client: GRPCClient
  private var rpcNumber: Int32
  private var rpcType: RPCType
  private var messagesPerStream: Int32
  private var protoParams: Grpc_Testing_SimpleProtoParams
  private let rpcStats: NIOLockedValueBox<RPCStats>
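
  /// Creates a client which runs `rpcNumber` RPCs of the given `rpcType`, sizing requests and
  /// responses according to `protoParams` and recording latencies in a histogram configured by
  /// `histogramParams` (or in a default histogram when `histogramParams` is `nil`).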
  init(
    client: GRPCClient,
    rpcNumber: Int32,
    rpcType: RPCType,
    messagesPerStream: Int32,
    protoParams: Grpc_Testing_SimpleProtoParams,
    histogramParams: Grpc_Testing_HistogramParams?
  ) {
    self.client = client
    self.rpcNumber = rpcNumber
    self.messagesPerStream = messagesPerStream
    self.protoParams = protoParams
    self.rpcType = rpcType

    let histogram: RPCStats.LatencyHistogram
    if let histogramParams = histogramParams {
      histogram = .init(
        resolution: histogramParams.resolution,
        maxBucketStart: histogramParams.maxPossible
      )
    } else {
      histogram = .init()
    }
    self.rpcStats = NIOLockedValueBox(RPCStats(latencyHistogram: histogram))
  }
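
  /// The benchmark scenario to run: a unary call, a ping-pong bidirectional stream, a
  /// client-streaming call, a server-streaming call, or a bidirectional stream on which both
  /// sides send independently.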
  enum RPCType {
    case unary
    case streaming
    case streamingFromClient
    case streamingFromServer
    case streamingBothWays
  }
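
  /// A snapshot of the statistics recorded so far, taken while holding the lock.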
  internal var currentStats: RPCStats {
    return self.rpcStats.withLockedValue { stats in
      return stats
    }
  }
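
  /// Runs the underlying `GRPCClient` and issues `rpcNumber` concurrent RPCs against it,
  /// recording the latency and error code of each one.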
  internal func run() async throws {
    let benchmarkClient = Grpc_Testing_BenchmarkServiceClient(client: client)
    return try await withThrowingTaskGroup(of: Void.self) { clientGroup in
      // Start the client.
      clientGroup.addTask { try await client.run() }

      // Make the requests to the server and register the latency for each one.
      try await withThrowingTaskGroup(of: Void.self) { rpcsGroup in
        for _ in 0 ..< self.rpcNumber {
          rpcsGroup.addTask {
            let (latency, errorCode) = try await self.makeRPC(
              benchmarkClient: benchmarkClient
            )
            self.rpcStats.withLockedValue {
              $0.latencyHistogram.record(latency)
              if let errorCode = errorCode {
                $0.requestResultCount[errorCode, default: 0] += 1
              }
            }
          }
        }
        try await rpcsGroup.waitForAll()
      }

      try await clientGroup.next()
    }
  }
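
  /// Runs `body`, returning its result together with the elapsed time in nanoseconds.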
  private func timeIt<R>(
    _ body: () async throws -> R
  ) async rethrows -> (R, nanoseconds: Double) {
    let startTime = DispatchTime.now().uptimeNanoseconds
    let result = try await body()
    let endTime = DispatchTime.now().uptimeNanoseconds
    return (result, nanoseconds: Double(endTime - startTime))
  }

  /// Makes a single RPC of the configured type and measures the time it takes to complete.
  ///
  /// - Returns: The latency of the RPC in nanoseconds, and the error code if the RPC failed.
  private func makeRPC(
    benchmarkClient: Grpc_Testing_BenchmarkServiceClient
  ) async throws -> (latency: Double, errorCode: RPCError.Code?) {
    let message = Grpc_Testing_SimpleRequest.with {
      $0.responseSize = self.protoParams.respSize
      $0.payload = Grpc_Testing_Payload.with {
        $0.body = Data(count: Int(self.protoParams.reqSize))
      }
    }

    switch self.rpcType {
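    // A single request followed by a single response.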
    case .unary:
      let (errorCode, nanoseconds): (RPCError.Code?, Double) = await self.timeIt {
        do {
          try await benchmarkClient.unaryCall(
            request: ClientRequest.Single(message: message)
          ) { response in
            _ = try response.message
          }
          return nil
        } catch let error as RPCError {
          return error.code
        } catch {
          return .unknown
        }
      }
      return (latency: nanoseconds, errorCode)

    // Repeated sequence of one request followed by one response.
    // It is a ping-pong of messages between the client and the server.
    case .streaming:
      let (errorCode, nanoseconds): (RPCError.Code?, Double) = await self.timeIt {
        do {
          let ids = AsyncStream.makeStream(of: Int.self)
          let streamingRequest = ClientRequest.Stream { writer in
            for try await id in ids.stream {
              if id <= self.messagesPerStream {
                try await writer.write(message)
              } else {
                return
              }
            }
          }

          ids.continuation.yield(1)

          try await benchmarkClient.streamingCall(request: streamingRequest) { response in
            var id = 1
            for try await _ in response.messages {
              id += 1
              ids.continuation.yield(id)
            }
          }
          return nil
        } catch let error as RPCError {
          return error.code
        } catch {
          return .unknown
        }
      }
      return (latency: nanoseconds, errorCode)
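
    // A stream of requests from the client followed by a single response from the server.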
    case .streamingFromClient:
      let (errorCode, nanoseconds): (RPCError.Code?, Double) = await self.timeIt {
        do {
          let streamingRequest = ClientRequest.Stream { writer in
            for _ in 1 ... self.messagesPerStream {
              try await writer.write(message)
            }
          }

          try await benchmarkClient.streamingFromClient(
            request: streamingRequest
          ) { response in
            _ = try response.message
          }
          return nil
        } catch let error as RPCError {
          return error.code
        } catch {
          return .unknown
        }
      }
      return (latency: nanoseconds, errorCode)
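
    // A single request from the client followed by a stream of responses from the server.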
    case .streamingFromServer:
      let (errorCode, nanoseconds): (RPCError.Code?, Double) = await self.timeIt {
        do {
          try await benchmarkClient.streamingFromServer(
            request: ClientRequest.Single(message: message)
          ) { response in
            for try await _ in response.messages {}
          }
          return nil
        } catch let error as RPCError {
          return error.code
        } catch {
          return .unknown
        }
      }
      return (latency: nanoseconds, errorCode)
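
    // The client and the server each send a stream of messages: the client writes all of its
    // requests, and the server's responses are consumed as they arrive.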
    case .streamingBothWays:
      let (errorCode, nanoseconds): (RPCError.Code?, Double) = await self.timeIt {
        do {
          let streamingRequest = ClientRequest.Stream { writer in
            for _ in 1 ... self.messagesPerStream {
              try await writer.write(message)
            }
          }

          try await benchmarkClient.streamingBothWays(request: streamingRequest) { response in
            for try await _ in response.messages {}
          }
          return nil
        } catch let error as RPCError {
          return error.code
        } catch {
          return .unknown
        }
      }
      return (latency: nanoseconds, errorCode)
    }
  }
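
  /// Shuts the benchmark client down by closing the underlying `GRPCClient`.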
  internal func shutdown() {
    self.client.close()
  }
}