From 071c06cfbe61655c1117b48b86aacd9eb2c613d9 Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 12:51:15 -0500 Subject: [PATCH 1/9] multi project tracing example --- examples/trace/multiple_projects.rb | 156 ++++++++++++++++++++++++++++ 1 file changed, 156 insertions(+) create mode 100644 examples/trace/multiple_projects.rb diff --git a/examples/trace/multiple_projects.rb b/examples/trace/multiple_projects.rb new file mode 100644 index 0000000..3cd8b7d --- /dev/null +++ b/examples/trace/multiple_projects.rb @@ -0,0 +1,156 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require "bundler/setup" +require "braintrust" +require "opentelemetry/sdk" +require "ruby_llm" +require "openai" + +project1 = "Project-A" +project2 = "Project-B" +model1 = "gpt-4o-mini" +model2 = "claude-sonnet-4" + +# check for API keys +unless ENV["OPENAI_API_KEY"] && ENV["ANTHROPIC_API_KEY"] + puts "Error: Both OPENAI_API_KEY and ANTHROPIC_API_KEY environment variables are required" + puts "Get your API key from: https://platform.openai.com/api-keys" + puts "Get your Anthropic API key from: https://console.anthropic.com/" + puts "Set with `export OPENAI_API_KEY=`" + exit 1 +end + +unless ENV["BRAINTRUST_API_KEY"] + puts "Error: BRAINTRUST_API_KEY environment variable is required" + puts "Get your API key from https://www.braintrust.dev/app/settings or ask your org administrator" + exit 1 +end + +# Example: Log/Trace to Multiple Projects with Separate States +# +# This example demonstrates how to: +# 1. Create multiple Braintrust states for different projects +# 2. Set up separate tracer providers for each project +# 3. Log traces to different projects simultaneously +# +# Usage: +# bundle exec ruby examples/trace/multiple_projects.rb + +# Create first state for Project A (non-global) +state_a = Braintrust.init( + default_project: project1, + set_global: false, + enable_tracing: false # We'll manually set up tracing +) + # Create second state for Project B (non-global) +state_b = Braintrust.init( + default_project: project2, + set_global: false, + enable_tracing: false # We'll manually set up tracing +) + +Braintrust::Trace::Contrib::Github::Crmne::RubyLLM.wrap + +RubyLLM.configure do |config| + config.openai_api_key = ENV["OPENAI_API_KEY"] + config.anthropic_api_key = ENV["ANTHROPIC_API_KEY"] +end + +chat_openai = RubyLLM.chat(model: model1) +chat_anthropic = RubyLLM.chat(model: model2) + +# Create first tracer provider +tracer_provider_a = OpenTelemetry::SDK::Trace::TracerProvider.new + +# Setup using Trace.setup +# When you pass an explicit tracer_provider, it won't set it as global +Braintrust::Trace.setup(state_a, tracer_provider_a) + +# Get tracer for Project A +tracer_a = tracer_provider_a.tracer("Feature A") + +# Note: You can also use Trace.enable instead of Trace.setup: +# Braintrust::Trace.enable(tracer_provider_a, state: state_a) +# Braintrust::Trace.enable(tracer_provider_b, state: state_b) +# Both work the same when you provide explicit providers + +# Now create spans in both projects +root_span_a = nil +tracer_a.in_span("chat_ask") do |span| + root_span_a = span + span.set_attribute("project", "A") + + # Nested span in Project A + tracer_a.in_span("turn1") do |nested_t1| + nested_t1.set_attribute("gen_ai.operation.name", "chat") + nested_t1.set_attribute("gen_ai.request.model", model1) + input = "What is the best season to visit Japan?" 
+ output = chat_openai.ask(input) + + #gen_ai.prompt would work too + nested_t1.set_attribute("gen_ai.prompt", input) + nested_t1.set_attribute("braintrust.output", output.content) + + tracer_a.in_span("turn2") do |nested_t2| + nested_t2.set_attribute("braintrust.span_attributes.type", "llm") + nested_t2.set_attribute("braintrust.metadata.model", model2) + input = "Which airlines fly to Japan from SFO?" + output = chat_anthropic.ask(input) + + nested_t2.set_attribute("braintrust.input", input) + nested_t2.set_attribute("braintrust.output", output.content) + end + end +end + + +client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) +url = "https://upload.wikimedia.org/wikipedia/commons/thumb/6/65/Tokyo_Tower_during_daytime.jpg/330px-Tokyo_Tower_during_daytime.jpg" +# Create second tracer provider +tracer_provider_b = OpenTelemetry::SDK::Trace::TracerProvider.new +Braintrust::Trace.setup(state_b, tracer_provider_b) + +# Get tracer for Project A +tracer_b = tracer_provider_b.tracer("Feature B") + + +Braintrust::Trace::OpenAI.wrap(client, tracer_provider: tracer_provider_b) + +# Wrap all examples under a single parent trace +root_span = nil +tracer_b.in_span("vision") do |span| + root_span = span + # Example 1: Vision - Image Understanding + puts "\n1. Vision (Image Understanding)" + puts "-" * 50 + tracer_b.in_span("example-vision") do + response = client.chat.completions.create( + model: model1, + messages: [ + { + role: "user", + content: [ + {type: "text", text: "Tell me about this landmark."}, + { + type: "image_url", + image_url: { + url: url + } + } + ] + } + ], + max_tokens: 100 + ) + puts "✓ Vision response: #{response.choices[0].message.content[0..100]}..." + puts " Tokens: #{response.usage.total_tokens}" + rescue OpenAI::Errors::BadRequestError => e + puts "⊘ Skipped - Image URL error (#{e.message.split("\n").first[0..80]}...)" + rescue => e + puts "⊘ Error: #{e.class}" + end +end + +tracer_provider_a.shutdown +tracer_provider_b.shutdown \ No newline at end of file From b6e4e66f410f4c7a12d00b7ff88a64c217df70cc Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 13:11:37 -0500 Subject: [PATCH 2/9] Clean up, print intermediate state --- examples/trace/multiple_projects.rb | 63 +++++++++++++++++++++-------- 1 file changed, 46 insertions(+), 17 deletions(-) diff --git a/examples/trace/multiple_projects.rb b/examples/trace/multiple_projects.rb index 3cd8b7d..6bee784 100644 --- a/examples/trace/multiple_projects.rb +++ b/examples/trace/multiple_projects.rb @@ -1,5 +1,4 @@ #!/usr/bin/env ruby -# frozen_string_literal: true require "bundler/setup" require "braintrust" @@ -43,13 +42,14 @@ set_global: false, enable_tracing: false # We'll manually set up tracing ) - # Create second state for Project B (non-global) +# Create second state for Project B (non-global) state_b = Braintrust.init( default_project: project2, set_global: false, enable_tracing: false # We'll manually set up tracing ) +# Wrap all instances of RubyLLM client Braintrust::Trace::Contrib::Github::Crmne::RubyLLM.wrap RubyLLM.configure do |config| @@ -68,61 +68,85 @@ Braintrust::Trace.setup(state_a, tracer_provider_a) # Get tracer for Project A -tracer_a = tracer_provider_a.tracer("Feature A") +tracer_a = tracer_provider_a.tracer("MultiTurn") # Note: You can also use Trace.enable instead of Trace.setup: # Braintrust::Trace.enable(tracer_provider_a, state: state_a) # Braintrust::Trace.enable(tracer_provider_b, state: state_b) # Both work the same when you provide explicit providers -# Now create spans in both 
projects +# Now create spans in first project +puts "\nProject A: Multi-turn conversation" +puts "=" * 50 root_span_a = nil tracer_a.in_span("chat_ask") do |span| root_span_a = span - span.set_attribute("project", "A") + span.set_attribute("project", project1) - # Nested span in Project A + # Nested spans for multi-turn convo tracer_a.in_span("turn1") do |nested_t1| + # Using OTEL GenAI Semantic Conventions for properties + # https://www.braintrust.dev/docs/integrations/sdk-integrations/opentelemetry#manual-tracing + # Braintrust automatically maps `gen_ai.*` attributes to native Braintrust fields + # tracer_b will use native fields nested_t1.set_attribute("gen_ai.operation.name", "chat") nested_t1.set_attribute("gen_ai.request.model", model1) input = "What is the best season to visit Japan?" + puts "\nTurn 1 (#{model1}):" + puts "Q: #{input}" output = chat_openai.ask(input) - #gen_ai.prompt would work too nested_t1.set_attribute("gen_ai.prompt", input) - nested_t1.set_attribute("braintrust.output", output.content) + nested_t1.set_attribute("gen_ai.completion", output.content) + puts "A: #{output.content[0..100]}..." + puts " Tokens: #{output.to_h[:input_tokens]} in, #{output.to_h[:output_tokens]} out" tracer_a.in_span("turn2") do |nested_t2| - nested_t2.set_attribute("braintrust.span_attributes.type", "llm") - nested_t2.set_attribute("braintrust.metadata.model", model2) + nested_t2.set_attribute("gen_ai.operation.name", "chat") + nested_t2.set_attribute("gen_ai.request.model", model2) input = "Which airlines fly to Japan from SFO?" + puts "\nTurn 2 (#{model2}):" + puts "Q: #{input}" output = chat_anthropic.ask(input) - nested_t2.set_attribute("braintrust.input", input) - nested_t2.set_attribute("braintrust.output", output.content) + nested_t2.set_attribute("gen_ai.prompt", input) + nested_t2.set_attribute("gen_ai.completion", output.content) + puts "A: #{output.content[0..100]}..." + puts " Tokens: #{output.to_h[:input_tokens]} in, #{output.to_h[:output_tokens]} out" end end end +puts "\n✓ Multi-turn conversation completed" +puts "\n✓ View Project A trace in Braintrust:" +puts " #{Braintrust::Trace.permalink(root_span_a)}" -client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) url = "https://upload.wikimedia.org/wikipedia/commons/thumb/6/65/Tokyo_Tower_during_daytime.jpg/330px-Tokyo_Tower_during_daytime.jpg" + +# For second project, we'll use the Ruby OpenAI client +# You can log to multiple projects even if your clients use different client libs +client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) + # Create second tracer provider tracer_provider_b = OpenTelemetry::SDK::Trace::TracerProvider.new Braintrust::Trace.setup(state_b, tracer_provider_b) # Get tracer for Project A -tracer_b = tracer_provider_b.tracer("Feature B") - +tracer_b = tracer_provider_b.tracer("ImageUpload") +# Wrapping OpenAI client with second trace provider +# We could simply call `wrap` without tracer_provider, but then it would be bound to our global state Braintrust::Trace::OpenAI.wrap(client, tracer_provider: tracer_provider_b) -# Wrap all examples under a single parent trace +puts "\nProject B: Describe Image" +puts "=" * 50 + +# chat completion should automatically nest root_span = nil tracer_b.in_span("vision") do |span| root_span = span # Example 1: Vision - Image Understanding - puts "\n1. 
Vision (Image Understanding)" + puts "\n Vision (Image Understanding)" puts "-" * 50 tracer_b.in_span("example-vision") do response = client.chat.completions.create( @@ -152,5 +176,10 @@ end end +puts "\n✓ Vision example completed" +puts "\n✓ View Project B trace in Braintrust:" +puts " #{Braintrust::Trace.permalink(root_span)}" + +# Shutdown both tracer providers to flush spans tracer_provider_a.shutdown tracer_provider_b.shutdown \ No newline at end of file From 47da6b73204ffd0fe067b2319dc113452bdbdbce Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 13:20:31 -0500 Subject: [PATCH 3/9] Fix broken trace permalinks --- examples/trace/multiple_projects.rb | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/examples/trace/multiple_projects.rb b/examples/trace/multiple_projects.rb index 6bee784..60d99db 100644 --- a/examples/trace/multiple_projects.rb +++ b/examples/trace/multiple_projects.rb @@ -40,13 +40,16 @@ state_a = Braintrust.init( default_project: project1, set_global: false, - enable_tracing: false # We'll manually set up tracing + enable_tracing: false, # We'll manually set up tracing + blocking_login: true # Ensure login completes before tracing setup + # Not required if only tracing, login is async by default and can lead to a broken permalink if not synchronous ) # Create second state for Project B (non-global) state_b = Braintrust.init( default_project: project2, set_global: false, - enable_tracing: false # We'll manually set up tracing + enable_tracing: false, + blocking_login: true ) # Wrap all instances of RubyLLM client @@ -142,9 +145,9 @@ puts "=" * 50 # chat completion should automatically nest -root_span = nil +root_span_b = nil tracer_b.in_span("vision") do |span| - root_span = span + root_span_b = span # Example 1: Vision - Image Understanding puts "\n Vision (Image Understanding)" puts "-" * 50 @@ -178,7 +181,7 @@ puts "\n✓ Vision example completed" puts "\n✓ View Project B trace in Braintrust:" -puts " #{Braintrust::Trace.permalink(root_span)}" +puts " #{Braintrust::Trace.permalink(root_span_b)}" # Shutdown both tracer providers to flush spans tracer_provider_a.shutdown From 9faf6b5bcee6bb39e185bcf52d544186a23acf21 Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 21:42:08 -0500 Subject: [PATCH 4/9] Use Braintrust native span props in project B --- examples/trace/multiple_projects.rb | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/examples/trace/multiple_projects.rb b/examples/trace/multiple_projects.rb index 60d99db..3bc0b33 100644 --- a/examples/trace/multiple_projects.rb +++ b/examples/trace/multiple_projects.rb @@ -16,7 +16,7 @@ puts "Error: Both OPENAI_API_KEY and ANTHROPIC_API_KEY environment variables are required" puts "Get your API key from: https://platform.openai.com/api-keys" puts "Get your Anthropic API key from: https://console.anthropic.com/" - puts "Set with `export OPENAI_API_KEY=`" + puts "Set with `export OPENAI_API_KEY= and export ANTHROPIC_API_KEY=`" exit 1 end @@ -151,14 +151,16 @@ # Example 1: Vision - Image Understanding puts "\n Vision (Image Understanding)" puts "-" * 50 - tracer_b.in_span("example-vision") do + + input = "Tell me about this landmark." 
+ tracer_b.in_span("example-vision") do |nested| response = client.chat.completions.create( model: model1, messages: [ { role: "user", content: [ - {type: "text", text: "Tell me about this landmark."}, + {type: "text", text: input}, { type: "image_url", image_url: { @@ -170,6 +172,15 @@ ], max_tokens: 100 ) + + # Using Braintrust native span attributes + # For comparisons with OTEL GenAI semantic convention properties, + # see https://www.braintrust.dev/docs/integrations/sdk-integrations/opentelemetry#manual-tracing + nested.set_attribute("braintrust.span_attributes.type", "llm") + nested.set_attribute("metadata.model", model1) + nested.set_attribute("braintrust.input", input) + nested.set_attribute("braintrust.output", "#{response.choices[0].message.content}") + puts "✓ Vision response: #{response.choices[0].message.content[0..100]}..." puts " Tokens: #{response.usage.total_tokens}" rescue OpenAI::Errors::BadRequestError => e From faa729bba77680c5d011a77c1e104763ba658263 Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 23:06:04 -0500 Subject: [PATCH 5/9] Add appraisal for testing --- Appraisals | 6 ++++++ Rakefile | 1 + examples/trace/multiple_projects.rb | 1 + gemfiles/ruby_llm_openai.gemfile | 9 +++++++++ 4 files changed, 17 insertions(+) create mode 100644 gemfiles/ruby_llm_openai.gemfile diff --git a/Appraisals b/Appraisals index 3a87121..ffcabaf 100644 --- a/Appraisals +++ b/Appraisals @@ -49,3 +49,9 @@ appraise "opentelemetry-latest" do gem "opentelemetry-sdk", ">= 1.10" gem "opentelemetry-exporter-otlp", ">= 0.31" end + +# for multiple_projects.rb only, test both openai and ruby_llm +appraise "ruby-llm-openai" do + gem "openai", ">= 0.34" + gem "ruby_llm", ">= 1.9" +end diff --git a/Rakefile b/Rakefile index 5aa4d62..04b39c7 100644 --- a/Rakefile +++ b/Rakefile @@ -32,6 +32,7 @@ end def appraisal_for(example) case example + when /multiple_projects/ then "ruby-llm-openai" when /ruby_llm/ then "ruby_llm" when /ruby-openai/, /ruby_openai/, /alexrudall/ then "ruby-openai" when /anthropic/ then "anthropic" diff --git a/examples/trace/multiple_projects.rb b/examples/trace/multiple_projects.rb index 3bc0b33..790e2bc 100644 --- a/examples/trace/multiple_projects.rb +++ b/examples/trace/multiple_projects.rb @@ -1,4 +1,5 @@ #!/usr/bin/env ruby +# frozen_string_literal: true require "bundler/setup" require "braintrust" diff --git a/gemfiles/ruby_llm_openai.gemfile b/gemfiles/ruby_llm_openai.gemfile new file mode 100644 index 0000000..a515a58 --- /dev/null +++ b/gemfiles/ruby_llm_openai.gemfile @@ -0,0 +1,9 @@ +# This file was generated by Appraisal + +source "https://rubygems.org" + +gem "minitest-reporters", "~> 1.6" +gem "openai", ">= 0.34" +gem "ruby_llm", ">= 1.9" + +gemspec path: "../" From 431b2a8152477c4bc4e87f850e980c1ed34aa9cf Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 23:14:55 -0500 Subject: [PATCH 6/9] Update readme for all examples --- examples/README.md | 47 +++++++++++++++++++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 7 deletions(-) diff --git a/examples/README.md b/examples/README.md index 0affaf4..29e3949 100644 --- a/examples/README.md +++ b/examples/README.md @@ -14,24 +14,57 @@ export BRAINTRUST_API_KEY="your-api-key-here" ## Running Examples +### Using Rake (Recommended) + +The rake task automatically uses the correct gemfile for each example: + +```bash +# Run a single example +rake 'example[examples/trace/multiple_projects.rb]' + +# Run all examples +rake examples +``` + +### Running Directly + From the project 
root: ```bash # Run a specific example -ruby examples/login/login_basic.rb +ruby examples/login.rb # Enable debug logging -BRAINTRUST_DEBUG=true ruby examples/login/login_basic.rb +BRAINTRUST_DEBUG=true ruby examples/login.rb ``` ## Available Examples ### Login Examples -- **`login/login_basic.rb`**: Basic login example showing how to authenticate and retrieve organization information +- **`login.rb`**: Basic login example showing how to authenticate and retrieve organization information + + +### Tracing Examples + +- **`trace.rb`**: Basic OpenTelemetry tracing example +- **`trace/span_filtering.rb`**: Example of filtering out non-AI spans in traces to reduce noise +- **`trace/trace_attachments.rb`**: Example of adding attachments (images, PDFs, BLOBs) to traces +- **`trace/multiple_projects.rb`**: Example of logging traces to multiple Braintrust projects simultaneously + +### LLM Integration Examples + +- **`openai.rb`**: OpenAI integration example +- **`anthropic.rb`**: Anthropic integration example +- **`ruby_llm.rb`**: Ruby LLM integration example +- **`alexrudall_openai.rb`**: Alexrudall's ruby-openai gem integration example + +### Evaluation Examples + +- **`eval.rb`**: Defining scorers and running evals +- **`eval/dataset.rb`**: Running an evaluation against a dataset +- **`eval/remote_functions.rb`**: Using remote functions (server-side prompts) in evaluations -## Coming Soon +### API Examples -- OpenTelemetry tracing examples -- OpenAI integration examples -- Eval framework examples +- **`api/dataset.rb`**: Dataset API usage example From 7f7b7f23b5c321389d6959bb29cd62e8c8aa1d86 Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Mon, 22 Dec 2025 23:35:28 -0500 Subject: [PATCH 7/9] Style updates --- examples/trace/multiple_projects.rb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/trace/multiple_projects.rb b/examples/trace/multiple_projects.rb index 790e2bc..62e6f93 100644 --- a/examples/trace/multiple_projects.rb +++ b/examples/trace/multiple_projects.rb @@ -50,15 +50,15 @@ default_project: project2, set_global: false, enable_tracing: false, - blocking_login: true + blocking_login: true ) # Wrap all instances of RubyLLM client Braintrust::Trace::Contrib::Github::Crmne::RubyLLM.wrap RubyLLM.configure do |config| - config.openai_api_key = ENV["OPENAI_API_KEY"] - config.anthropic_api_key = ENV["ANTHROPIC_API_KEY"] + config.openai_api_key = ENV["OPENAI_API_KEY"] + config.anthropic_api_key = ENV["ANTHROPIC_API_KEY"] end chat_openai = RubyLLM.chat(model: model1) @@ -180,7 +180,7 @@ nested.set_attribute("braintrust.span_attributes.type", "llm") nested.set_attribute("metadata.model", model1) nested.set_attribute("braintrust.input", input) - nested.set_attribute("braintrust.output", "#{response.choices[0].message.content}") + nested.set_attribute("braintrust.output", response.choices[0].message.content.to_s) puts "✓ Vision response: #{response.choices[0].message.content[0..100]}..." 
puts " Tokens: #{response.usage.total_tokens}" @@ -197,4 +197,4 @@ # Shutdown both tracer providers to flush spans tracer_provider_a.shutdown -tracer_provider_b.shutdown \ No newline at end of file +tracer_provider_b.shutdown From 60b00b7bfb514e16ec899520e9f3dc04d2329920 Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Wed, 24 Dec 2025 12:56:20 -0500 Subject: [PATCH 8/9] Fix failing CI; compatibility for Ruby 3.4+ where base64 gem removed --- Gemfile.lock | 5 +++-- braintrust.gemspec | 4 ++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 7a03578..331958f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,6 +2,7 @@ PATH remote: . specs: braintrust (0.0.11) + base64 (~> 0.2) openssl (~> 3.3.1) opentelemetry-exporter-otlp (~> 0.28) opentelemetry-sdk (~> 1.3) @@ -50,7 +51,7 @@ GEM builder minitest (>= 5.0) ruby-progressbar - openssl (3.3.1) + openssl (3.3.2) opentelemetry-api (1.7.0) opentelemetry-common (0.23.0) opentelemetry-api (~> 1.0) @@ -153,4 +154,4 @@ DEPENDENCIES yard (~> 0.9) BUNDLED WITH - 2.4.19 + 4.0.3 diff --git a/braintrust.gemspec b/braintrust.gemspec index 6f1fd68..3a8dcfc 100644 --- a/braintrust.gemspec +++ b/braintrust.gemspec @@ -31,6 +31,10 @@ Gem::Specification.new do |spec| # Runtime dependencies spec.add_runtime_dependency "opentelemetry-sdk", "~> 1.3" spec.add_runtime_dependency "opentelemetry-exporter-otlp", "~> 0.28" + # Ruby 3.4+ considers this a bundled gem, removed from default gems + # bundler should use default base64 lib in Ruby <3.4 + # https://stdgems.org/base64/ + spec.add_runtime_dependency "base64", "~> 0.2" # OpenSSL 3.3.1+ fixes macOS CRL (Certificate Revocation List) verification issues # that occur with OpenSSL 3.6 + Ruby (certificate verify failed: unable to get certificate CRL). From 405235acbcd0135290200b97b613b1b29f113779 Mon Sep 17 00:00:00 2001 From: Nick Slavin Date: Wed, 24 Dec 2025 13:13:44 -0500 Subject: [PATCH 9/9] revert to bundler 2.4.19 --- Gemfile.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gemfile.lock b/Gemfile.lock index 331958f..0bd3f74 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -154,4 +154,4 @@ DEPENDENCIES yard (~> 0.9) BUNDLED WITH - 4.0.3 + 2.4.19