class JobsTestCase

Public Instance Methods

test_asynchronous_job_has_no_segmentation_by_default()

Results and preview on a search job should have no segmentation by default.

# File test/test_jobs.rb, line 152
def test_asynchronous_job_has_no_segmentation_by_default
  omit_if(@service.splunk_version[0] == 4)
  job = @service.jobs.create("search index=_internal GET | head 3")
  until job.is_done?()
    sleep(0.1)
  end
  assert_false(job.events().include?("<sg"))
  assert_false(job.preview().include?("<sg"))
end
test_asynchronous_job_has_segmentation_when_forced()

Results and preview on a search job should have segmentation when it is forced.

# File test/test_jobs.rb, line 166
def test_asynchronous_job_has_segmentation_when_forced
  omit_if(@service.splunk_version[0] == 4)
  job = @service.jobs.create("search index=_internal GET | head 3")
  until job.is_done?()
    sleep(0.1)
  end
  assert_true(job.events(:segmentation => "raw").include?("<sg"))
  assert_true(job.preview(:segmentation => "raw").include?("<sg"))
end
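
Outside the test harness, the same segmentation switch can be used directly on Job#events and Job#preview. A minimal sketch, assuming service is an already authenticated Splunk::Service (the connection itself is not shown):

require 'splunk-sdk-ruby'

job = service.jobs.create("search index=_internal | head 3")
sleep(0.1) until job.is_done?()

plain = job.events()                        # no <sg> markup by default
raw   = job.events(:segmentation => "raw")  # forces <sg> elements into the XML
raw.include?("<sg")                         # => true (the tests above skip Splunk 4.x)
job.cancel()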
test_create_and_idempotent_cancel()
# File test/test_jobs.rb, line 17
def test_create_and_idempotent_cancel
  jobs = @service.jobs
  job = jobs.create(QUERY)
  assert_true(jobs.has_key?(job.sid))
  job.cancel()
  assert_eventually_true() { !jobs.has_key?(job.sid) }
  job.cancel() # Cancel twice should be a nop
end
test_create_with_exec_mode_fails()
# File test/test_jobs.rb, line 39
def test_create_with_exec_mode_fails
  assert_raises(ArgumentError) do
    @service.jobs.create(QUERY, :exec_mode => "oneshot")
  end
end
test_create_with_garbage_fails()
# File test/test_jobs.rb, line 11
def test_create_with_garbage_fails
  assert_raises(SplunkHTTPError) do
    @service.jobs.create("aweaj;awfaw faf'adf")
  end
end
test_each_and_values()
# File test/test_jobs.rb, line 176
def test_each_and_values
  jobs = Jobs.new(@service)

  created_jobs = []

  (1..3).each() do |i|
    job = jobs.create("search index=_internal | head #{i}")
    while !job.is_ready?
      sleep(0.1)
    end
    created_jobs << job
  end

  each_jobs = []
  jobs.each() do |job|
    assert_false(job.name.empty?)
    each_jobs << job.sid
  end

  values_jobs = jobs.values().map() { |j| j.sid }
  assert_equal(each_jobs, values_jobs)

  created_jobs.each do |job|
    job.cancel()
  end
end
test_enable_preview()
# File test/test_jobs.rb, line 249
def test_enable_preview
  begin
    install_app_from_collection("sleep_command")
    job = @service.jobs.create("search index=_internal | sleep 2 | join [sleep 2]")

    while !job.is_ready?()
      sleep(0.1)
    end

    assert_equal("0", job["isPreviewEnabled"])
    job.enable_preview()
    assert_eventually_true(1000) do
      job.refresh()
      fail("Job finished before preview enabled") if job.is_done?()
      job["isPreviewEnabled"] == "1"
    end
  ensure
    job.cancel()
    assert_eventually_true do
      !@service.jobs.contains?(job.sid)
    end
    # We have to wait for jobs to be properly killed or we can't delete
    # the sleep_command app in teardown on Windows.
    sleep(4)
  end
end
test_export()
# File test/test_jobs.rb, line 78
def test_export
  stream = @service.jobs.create_export(QUERY)
  assert_true(stream.is_a?(ExportStream))
  results = ResultsReader.new(stream).to_a()
  assert_equal(3, results.length())
end
test_export_has_forced_segmentation()

Export jobs should include <sg> elements in the XML they return when the :segmentation argument is given a value (such as "raw") that forces segmentation.

# File test/test_jobs.rb, line 140
def test_export_has_forced_segmentation
  omit_if(@service.splunk_version[0] == 4)

  stream = @service.create_export("search index=_internal GET | head 3",
                                   :segmentation => "raw")
  assert_true(stream.include?("<sg"))
end
test_export_has_no_segmentation_by_default()

Test that export jobs have no <sg> elements in the XML they return by default.

# File test/test_jobs.rb, line 130
def test_export_has_no_segmentation_by_default
  omit_if(@service.splunk_version[0] == 4)
  stream = @service.create_export("search index=_internal GET | head 3")
  assert_false(stream.include?("<sg"))
end
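
The :segmentation argument works the same way for export streams. A minimal sketch, again assuming service is an already authenticated Splunk::Service:

stream = service.jobs.create_export("search index=_internal | head 3",
                                    :segmentation => "raw")
stream.include?("<sg")   # => true; omitting :segmentation yields unsegmented XML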
test_export_on_service()

Test that the convenience method Service#create_export behaves the same way as Jobs#create_export.

# File test/test_jobs.rb, line 89
def test_export_on_service
  stream = @service.create_export(QUERY)
  results = ResultsReader.new(stream).to_a()
  assert_equal(3, results.length())
end
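
For application code, Service#create_export combined with ResultsReader is the usual way to consume an export. A sketch, assuming an authenticated service; the "_raw" field access is an assumption that holds for events from _internal:

stream = service.create_export("search index=_internal | head 3")
reader = Splunk::ResultsReader.new(stream)
reader.each do |event|     # each event behaves like a Hash of field names to values
  puts event["_raw"]       # "_raw" is assumed to be present on _internal events
end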
test_export_with_garbage_fails()
# File test/test_jobs.rb, line 72
def test_export_with_garbage_fails
  assert_raises(SplunkHTTPError) do
    @service.jobs.create_export("abavadfa;ejwfawfasdfadf wfw").to_a()
  end
end
test_oneshot()
# File test/test_jobs.rb, line 45
def test_oneshot
  jobs = @service.jobs
  stream = jobs.create_oneshot(QUERY)
  results = ResultsReader.new(stream)
  assert_false(results.is_preview?)
  events = results.to_a()
  assert_equal(3, events.length())
end
test_oneshot_has_forced_segmentation()

Test that <sg> elements are returned in the XML from a oneshot job when the option :segmentation => "raw" is passed.

# File test/test_jobs.rb, line 119
def test_oneshot_has_forced_segmentation
  omit_if(@service.splunk_version[0] == 4)
  stream = @service.create_oneshot("search index=_internal GET | head 3",
                                   :segmentation => "raw")
  assert_true(stream.include?("<sg"))
end
test_oneshot_has_no_segmentation_by_default()

Test that oneshot jobs have no <sg> elements in the XML they return by default.

# File test/test_jobs.rb, line 109
def test_oneshot_has_no_segmentation_by_default
  omit_if(@service.splunk_version[0] == 4)
  stream = @service.create_oneshot("search index=_internal GET | head 3")
  assert_false(stream.include?("<sg"))
end
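
Oneshot searches accept the same :segmentation argument. A minimal sketch, assuming an authenticated service:

stream = service.create_oneshot("search index=_internal | head 3",
                                :segmentation => "raw")
stream.include?("<sg")   # => true; leave :segmentation out for unsegmented XML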
test_oneshot_on_service()

Test that Service#create_oneshot properly creates a oneshot search.

# File test/test_jobs.rb, line 57
def test_oneshot_on_service
  jobs = @service.jobs
  stream = @service.create_oneshot(QUERY)
  results = ResultsReader.new(stream)
  assert_false(results.is_preview?)
  events = results.to_a()
  assert_equal(3, events.length())
end
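
Typical usage of Service#create_oneshot pairs it with ResultsReader, as in this sketch (again assuming an authenticated service):

stream  = service.create_oneshot("search index=_internal | head 3")
results = Splunk::ResultsReader.new(stream)
results.is_preview?       # => false; oneshot searches return only final results
results.to_a().length     # number of events returned (at most 3 here)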
test_oneshot_with_garbage_fails()
# File test/test_jobs.rb, line 66
def test_oneshot_with_garbage_fails
  assert_raises(SplunkHTTPError) do
    @service.jobs.create_oneshot("abcwrawerafawf 'adfad'faw")
  end
end
test_preview_and_events()
# File test/test_jobs.rb, line 203
def test_preview_and_events
  job = @service.jobs.create(QUERY, JOB_ARGS)
  assert_eventually_true() { job.is_done?() }
  assert_true(Integer(job['eventCount']) <= 3)

  preview_stream = job.preview()
  preview_results = ResultsReader.new(preview_stream)
  assert_false(preview_results.is_preview?)
  preview_array = preview_results.to_a()

  events_stream = job.events()
  events_results = ResultsReader.new(events_stream)
  assert_false(events_results.is_preview?)
  events_array = events_results.to_a()

  results_stream = job.results()
  results_results = ResultsReader.new(results_stream)
  assert_false(results_results.is_preview?)
  results_array = results_results.to_a()

  assert_equal(events_array, preview_array)
  assert_equal(results_array, preview_array)

  job.cancel()
end
test_search_with_many_results()

Splunk by default returns at most 100 events from a call to results or preview. We need to make sure that overriding this limit with the :count argument works.

# File test/test_jobs.rb, line 280
def test_search_with_many_results
  internal = @service.indexes.fetch("_internal")
  internal.refresh()
  if internal.fetch("totalEventCount").to_i < 150
    fail("Need at 150 events in index _internal for this test.")
  end

  job = @service.jobs.create("search index=_internal | head 150")
  while !job.is_done?()
    sleep(0.1)
  end

  stream = job.results(:count => 0)
  results = Splunk::ResultsReader.new(stream)
  count = 0
  results.each do |event|
    count += 1
  end
  assert_equal(150, count)

  stream = job.preview(:count => 0)
  results = Splunk::ResultsReader.new(stream)
  count = 0
  results.each do |event|
    count += 1
  end
  assert_equal(150, count)
end
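
Outside the tests, passing :count => 0 to Job#results or Job#preview is how to request all available results rather than the default 100. A sketch, assuming an authenticated service and enough events in _internal:

job = service.jobs.create("search index=_internal | head 150")
sleep(0.1) until job.is_done?()

all = Splunk::ResultsReader.new(job.results(:count => 0)).to_a()
all.length                # up to 150 rather than being capped at 100
job.cancel()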
test_service_create_and_idempotent_cancel()

There is a convenience method on Service (Service#create_search) to create an asynchronous search job. Test it the same way as Jobs#create.

# File test/test_jobs.rb, line 30
def test_service_create_and_idempotent_cancel
  jobs = @service.jobs
  job = @service.create_search(QUERY)
  assert_true(jobs.has_key?(job.sid))
  job.cancel()
  assert_eventually_true() { !jobs.has_key?(job.sid) }
  job.cancel() # Cancel twice should be a nop
end
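
A minimal sketch of the convenience method in application code, assuming an authenticated service:

job = service.create_search("search index=_internal | head 3")
service.jobs.has_key?(job.sid)   # => true while the job exists
job.cancel()                     # a later, second cancel() is a no-op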
test_timeline()
# File test/test_jobs.rb, line 229
def test_timeline
  original_xml_library = $splunk_xml_library
  job = @service.jobs.create(QUERY, JOB_ARGS)
  assert_eventually_true() { job.is_done?() }

  begin
    Splunk::require_xml_library(:rexml)
    timeline = job.timeline()
    assert_true(timeline.is_a?(Array))

    Splunk::require_xml_library(:nokogiri)
    timeline = job.timeline()
    assert_true(timeline.is_a?(Array))
  ensure
    # Have to reset the XML library or test_resultsreader gets unhappy.
    Splunk::require_xml_library(original_xml_library)
    job.cancel()
  end
end