diff --git a/.github/workflows/rubocop.yml b/.github/workflows/rubocop.yml
new file mode 100644
index 0000000..c516d8b
--- /dev/null
+++ b/.github/workflows/rubocop.yml
@@ -0,0 +1,26 @@
+name: RuboCop
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+permissions:
+  contents: read
+
+jobs:
+  test:
+    name: RuboCop
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Ruby
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: '3.3'
+          rubygems: latest
+          bundler-cache: true
+      - name: RuboCop
+        run: bundle exec rubocop
diff --git a/.rubocop.yml b/.rubocop.yml
new file mode 100644
index 0000000..4d6a7f0
--- /dev/null
+++ b/.rubocop.yml
@@ -0,0 +1,47 @@
+inherit_from: .rubocop_todo.yml
+
+# inherit_from: .rubocop_todo.yml
+
+require:
+  - rubocop-packaging
+  - rubocop-performance
+  - rubocop-rails
+  - rubocop-rake
+  - rubocop-rspec
+
+AllCops:
+  NewCops: enable
+  TargetRubyVersion: 2.3
+  Exclude:
+    - .git/**/*
+    - .github/**/*
+    - bin/**/*
+    - gemfiles/**/*
+    - node_modules/**/*
+    - tmp/**/*
+    - vendor/**/*
+    - lib/**/*
+
+Layout/LineLength:
+  Enabled: false
+
+Rails/TimeZone:
+  Enabled: false
+
+RSpec/ExampleLength:
+  Enabled: false
+
+RSpec/MultipleExpectations:
+  Enabled: false
+
+RSpec/MultipleMemoizedHelpers:
+  Max: 10
+
+RSpec/NestedGroups:
+  Enabled: false
+
+Style/ArgumentsForwarding:
+  Enabled: false
+
+Style/OpenStructUse:
+  Enabled: false
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml
new file mode 100644
index 0000000..73b2d89
--- /dev/null
+++ b/.rubocop_todo.yml
@@ -0,0 +1,11 @@
+# This configuration was generated by
+# `rubocop --auto-gen-config --no-offense-counts --no-auto-gen-timestamp`
+# using RuboCop version 1.63.2.
+# The point is for the user to remove these configuration records
+# one by one as the offenses are removed from the code base.
+# Note that changes in the inspected code, or installation of new
+# versions of RuboCop, may require this file to be generated again.
+
+RSpec/AnyInstance:
+  Exclude:
+    - 'spec/lib/sharepoint/client_methods_spec.rb'
diff --git a/Gemfile b/Gemfile
index fa75df1..b8f66ee 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,3 +1,22 @@
+# frozen_string_literal: true
+
 source 'https://rubygems.org'
 
 gemspec
+
+gem 'byebug'
+gem 'dotenv'
+gem 'rake'
+gem 'rspec'
+gem 'ruby-filemagic'
+gem 'simplecov'
+gem 'webmock'
+
+if RUBY_VERSION >= '2.7'
+  gem 'rubocop', require: false
+  gem 'rubocop-packaging', require: false
+  gem 'rubocop-performance', require: false
+  gem 'rubocop-rails', require: false
+  gem 'rubocop-rake', require: false
+  gem 'rubocop-rspec', require: false
+end
diff --git a/Rakefile b/Rakefile
index 63a721d..2b9c584 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,5 +1,6 @@
-begin
-  require 'rspec/core/rake_task'
-  RSpec::Core::RakeTask.new(:spec)
-rescue LoadError
-end
+# frozen_string_literal: true
+
+require 'rspec/core/rake_task'
+RSpec::Core::RakeTask.new(:spec)
+
+task default: :spec
diff --git a/sharepoint.gemspec b/sharepoint.gemspec
index 55751c5..0a4f4ff 100644
--- a/sharepoint.gemspec
+++ b/sharepoint.gemspec
@@ -1,25 +1,21 @@
+# frozen_string_literal: true
+
 Gem::Specification.new do |gem|
   gem.name = 'sharepoint'
   gem.version = '0.1.0'
-  gem.authors = [ 'Antonio Delfin' ]
-  gem.email = [ 'a.delfin@ifad.org' ]
-  gem.description = %q(Ruby client to consume Sharepoint services)
-  gem.summary = %q(Ruby client to consume Sharepoint services)
-  gem.homepage = "https://github.com/ifad/sharepoint"
+  gem.authors = ['Antonio Delfin']
+  gem.email = ['a.delfin@ifad.org']
+  gem.description = 'Ruby client to consume Sharepoint services'
+  gem.summary = 'Ruby client to consume Sharepoint services'
+  gem.homepage = 'https://github.com/ifad/sharepoint'
 
-  gem.files = `git ls-files`.split("\n")
-  gem.require_paths = ["lib"]
+  gem.files = Dir.glob('{LICENSE,README.md,lib/**/*.rb}', File::FNM_DOTMATCH)
+  gem.require_paths = ['lib']
 
   gem.required_ruby_version = '>= 2.3'
 
-  gem.add_dependency 'ethon'
   gem.add_dependency 'activesupport', '>= 4.0'
+  gem.add_dependency 'ethon'
 
-  gem.add_development_dependency 'rake'
-  gem.add_development_dependency 'rspec'
-  gem.add_development_dependency 'dotenv'
-  gem.add_development_dependency 'webmock'
-  gem.add_development_dependency 'byebug'
-  gem.add_development_dependency 'ruby-filemagic'
-  gem.add_development_dependency 'simplecov'
+  gem.metadata['rubygems_mfa_required'] = 'true'
 end
diff --git a/spec/lib/sharepoint/client_methods_spec.rb b/spec/lib/sharepoint/client_methods_spec.rb
index fee7bfd..f01a102 100644
--- a/spec/lib/sharepoint/client_methods_spec.rb
+++ b/spec/lib/sharepoint/client_methods_spec.rb
@@ -1,26 +1,32 @@
+# frozen_string_literal: true
+
 require 'spec_helper'
 
 describe Sharepoint::Client do
   before { mock_requests }
+
   let(:config) do
     {
-      username: ENV['SP_USERNAME'],
-      password: ENV['SP_PASSWORD'],
-      uri: ENV['SP_URL']
+      username: ENV.fetch('SP_USERNAME', nil),
+      password: ENV.fetch('SP_PASSWORD', nil),
+      uri: ENV.fetch('SP_URL', nil)
     }
   end
   let(:client) { described_class.new(config) }
 
   describe '#documents_for' do
+    subject(:documents_for) { client.documents_for path }
+
     let(:path) { '/Documents' }
+
     before { mock_responses('documents_for.json') }
-    subject { client.documents_for path }
+
     it 'returns documents with filled properties' do
-      is_expected.not_to be_empty
-      sample = subject.sample
-      %w(
+      expect(documents_for).not_to be_empty
+      sample = documents_for.sample
+      %w[
         title path name url created_at updated_at
-      ).each do |property|
+      ].each do |property|
         expect(sample).to respond_to property
         expect(sample.send(property)).not_to be_nil
       end
@@ -28,96 +34,112 @@
   end
 
   describe '#get_document' do
+    subject(:get_document) { client.get_document path }
+
     let(:path) { '/Documents/20160718 BRI-FCO boarding-pass.pdf' }
+
     before { mock_responses('get_document.json') }
-    subject { client.get_document path }
+
     it { is_expected.to be_a OpenStruct }
+
     it 'returns expected document properties' do
-      %w(guid title created modified).each do |property|
-        expect(subject).to respond_to property
-        expect(subject.send(property)).not_to be_nil
+      %w[guid title created modified].each do |property|
+        expect(get_document).to respond_to property
+        expect(get_document.send(property)).not_to be_nil
       end
     end
+
     it { is_expected.to respond_to(:url) }
   end
 
-  describe '#document_exists' do
+  describe '#document_exists?' do
+    subject { client.document_exists? file_path, site_path }
+
     let(:file_path) { "#{list_path}#{folder_path}/#{file_name}" }
     let(:site_path) { '/sites/APRop' }
-    subject { client.document_exists? file_path, site_path }
 
+    context 'when list exists' do
+      let(:list_path) { '/Lists/AFG' }
-    context "when list exists" do
-      let(:list_path) { "/Lists/AFG" }
+      context 'when folder exists' do
+        let(:folder_path) { '/1100001460/Design Report' }
-      context "and folder exists" do
-        let(:folder_path) { "/1100001460/Design Report" }
-
-        context "and file exists" do
+        context 'when file exists' do
          before { mock_responses('document_exists_true.json') }
-          let(:file_name) { "design_completion_part_1 without map.doc" }
-          it { is_expected.to eq true }
+
+          let(:file_name) { 'design_completion_part_1 without map.doc' }
+
+          it { is_expected.to be true }
        end
 
-        context "and file doesn't exist" do
+        context "when file doesn't exist" do
          before { mock_responses('document_exists_false.json') }
-          let(:file_name) { "dummy.doc" }
-          it { is_expected.to eq false }
+
+          let(:file_name) { 'dummy.doc' }
+
+          it { is_expected.to be false }
        end
      end
 
-      context "and folder doesn't exist" do
+      context "when folder doesn't exist" do
        before { mock_responses('document_exists_false.json') }
-        let(:folder_path) { "/foo/bar" }
-        let(:file_name) { "dummy.doc" }
-        it { is_expected.to eq false }
+
+        let(:folder_path) { '/foo/bar' }
+        let(:file_name) { 'dummy.doc' }
+
+        it { is_expected.to be false }
      end
    end
 
    context "when list doesn't exist" do
      before { mock_responses('document_exists_false.json') }
-      let(:list_path) { "/Lists/foobar" }
-      let(:folder_path) { "/1100001460/Design Report" }
-      let(:file_name) { "design_completion_part_1 without map.doc" }
-      it { is_expected.to eq false }
+
+      let(:list_path) { '/Lists/foobar' }
+      let(:folder_path) { '/1100001460/Design Report' }
+      let(:file_name) { 'design_completion_part_1 without map.doc' }
+
+      it { is_expected.to be false }
    end
  end
-
 
  describe '#list_documents' do
+    subject(:list_documents) { client.list_documents list_name, conditions }
+
    before { mock_responses('list_documents.json') }
+
    let(:list_name) { 'Documents' }
    let(:time) { Time.parse('2016-07-22') }
    let(:conditions) { "Modified ge datetime'#{time.utc.iso8601}'" }
-    subject { client.list_documents list_name, conditions }
+
    it 'returns Hash with expected keys' do
-      expect(subject).to be_a Hash
-      expect(subject[:server_responded_at]).to be_a Time
-      expect(subject[:results]).to be_a Array
+      expect(list_documents).to be_a Hash
+      expect(list_documents[:server_responded_at]).to be_a Time
+      expect(list_documents[:results]).to be_a Array
    end
-    context 'results' do
+
+    describe 'results' do
      let(:results) { subject[:results] }
+
      it 'is not empty' do
        expect(results).not_to be_empty
      end
-      it 'return documents with filled properties' do
+
+      it 'returns documents with filled properties' do
        sample = results.sample
-        %w(
+        %w[
          unique_id title created modified name server_relative_url length
-        ).each do |property|
+        ].each do |property|
          expect(sample).to respond_to property
          expect(sample.send(property)).not_to be_nil
        end
      end
-      it 'return documents verifying custom conditions' do
-        results.each do |document|
-          expect(Time.parse(document.modified)).to be >= time
-        end
+
+      it 'returns documents verifying custom conditions' do
+        expect(results.map { |document| Time.parse(document.modified) }).to all(be >= time)
      end
+
      it 'documents respond to url method' do
-        results.each do |document|
-          expect(document).to respond_to :url
-        end
+        expect(results).to all(respond_to :url)
      end
    end
  end
@@ -126,32 +148,35 @@
    let(:start_at) { Time.parse('2016-07-24') }
    let(:end_at) { nil }
    let(:default_properties) do
-      %w( write is_document list_id web_id created title author size path unique_id )
+      %w[write is_document list_id web_id created title author size path unique_id]
    end
 
-    context 'search whole SharePoint instance' do
+    context 'when searching whole SharePoint instance' do
+      subject(:search_modified_documents) { client.search_modified_documents({ start_at: start_at, end_at: end_at }) }
+
      before { mock_responses('search_modified_documents.json') }
-      subject { client.search_modified_documents( { start_at: start_at, end_at: end_at } ) }
+
      it 'returns Hash with expected keys' do
-        expect(subject).to be_a Hash
-        expect(subject[:server_responded_at]).to be_a Time
-        expect(subject[:results]).to be_a Array
+        expect(search_modified_documents).to be_a Hash
+        expect(search_modified_documents[:server_responded_at]).to be_a Time
+        expect(search_modified_documents[:results]).to be_a Array
      end
-      context 'results' do
+
+      describe 'results' do
        let(:results) { subject[:results] }
+
        it 'is not empty' do
          expect(results).not_to be_empty
        end
+
        it 'return document objects only' do
-          results.each do |document|
-            expect(document.is_document).to eq 'true'
-          end
+          expect(results.map(&:is_document)).to all(eq 'true')
        end
+
        it 'return documents modified after start_at' do
-          results.each do |document|
-            expect(Time.parse(document.write)).to be >= start_at
-          end
+          expect(results.map { |document| Time.parse(document.write) }).to all(be >= start_at)
        end
+
        it 'return default properties with values' do
          sample = results.sample
          default_properties.each do |property|
@@ -159,13 +184,14 @@
            expect(sample.send(property)).not_to be_nil
          end
        end
+
        it 'documents respond to url method' do
-          results.each do |document|
-            expect(document).to respond_to :url
-          end
+          expect(results).to all(respond_to :url)
        end
+
        context 'with range end' do
          let(:end_at) { Time.parse('2016-07-26') }
+
          it 'return documents modified between start_at and end_at' do
            results.each do |document|
              modified_at = Time.parse(document.write)
@@ -177,48 +203,58 @@
      end
    end
 
-    context 'search specific Site' do
-      let(:options) { { start_at: start_at } }
+    context 'when searching specific Site' do
      subject { client.search_modified_documents(options)[:results] }
+
+      let(:options) { { start_at: start_at } }
+
      context 'when existing web_id is passed' do
        before do
          mock_responses('search_modified_documents.json')
-          options.merge!( { web_id: 'b285c5ff-9256-4f30-99ba-26fc705a9f2d' } )
+          options.merge!({ web_id: 'b285c5ff-9256-4f30-99ba-26fc705a9f2d' })
        end
+
        it { is_expected.not_to be_empty }
      end
+
      context 'when non-existing web_id is passed' do
        before do
          mock_responses('search_noresults.json')
-          options.merge!( { web_id: 'a285c5ff-9256-4f30-99ba-26fc705a9f2e' } )
+          options.merge!({ web_id: 'a285c5ff-9256-4f30-99ba-26fc705a9f2e' })
        end
+
        it { is_expected.to be_empty }
      end
    end
 
-    context 'search specific List' do
-      let(:options) { { start_at: start_at } }
+    context 'when searching specific List' do
      subject { client.search_modified_documents(options)[:results] }
+
+      let(:options) { { start_at: start_at } }
+
      context 'when existing list_id is passed' do
        before do
          mock_responses('search_modified_documents.json')
-          options.merge!( { list_id: '3314c0cf-d5b0-4d1e-a5f1-9a10fca08bc3' } )
+          options.merge!({ list_id: '3314c0cf-d5b0-4d1e-a5f1-9a10fca08bc3' })
        end
+
        it { is_expected.not_to be_empty }
      end
+
      context 'when non-existing list_id is passed' do
        before do
          mock_responses('search_noresults.json')
-          options.merge!( { list_id: '2314c0cf-d5b0-4d1e-a5f1-9a10fca08bc4' } )
+          options.merge!({ list_id: '2314c0cf-d5b0-4d1e-a5f1-9a10fca08bc4' })
        end
+
        it { is_expected.to be_empty }
      end
    end
-
  end
-
 
  describe '#search' do
+    subject { client.search(options) }
+
    before { mock_responses('search_modified_documents.json') }
 
    let(:options) do
@@ -230,56 +266,57 @@
      }
    end
 
-    subject { client.search(options) }
-
    it { is_expected.not_to be_empty }
  end
 
  describe '#download' do
-    let(:document_json) { File.open('spec/fixtures/responses/get_document.json').read }
+    subject(:download) { client.download file_path: file_path }
+
+    let(:document_json) { File.read('spec/fixtures/responses/get_document.json') }
    let(:document_meta) { client.send :parse_get_document_response, document_json, [] }
    let(:file_path) { '/Documents/document.docx' }
-    let(:expected_content) { File.open('spec/fixtures/responses/document.docx').read }
+    let(:expected_content) { File.read('spec/fixtures/responses/document.docx') }
+
    before do
      allow(client).to receive(:get_document).and_return(document_meta)
      mock_responses('document.docx')
    end
-    subject { client.download file_path: file_path }
+
    it 'returns expected hash' do
-      is_expected.to have_key :file_contents
-      is_expected.to have_key :link_url
-      expect(subject[:file_contents]).to eq expected_content
+      expect(download).to have_key :file_contents
+      expect(download).to have_key :link_url
+      expect(download[:file_contents]).to eq expected_content
    end
  end
 
-  describe '#upload' do
-    # TODO
-    it "should upload the file correctly"
+  describe '#upload', pending: 'TODO' do
+    it 'should upload the file correctly'
  end
 
-  # TODO
-  describe ".update_metadata" do
-    it "shoud raise invalid metadata if any metadata value or key include the single quote char"
-    it "should update the metadata correctly"
+  describe '.update_metadata', pending: 'TODO' do
+    it 'shoud raise invalid metadata if any metadata value or key include the single quote char'
+    it 'should update the metadata correctly'
  end
 
  describe '#folder_exists?' do
    specify do
      allow_any_instance_of(Ethon::Easy).to receive(:response_code).and_return(200)
-      expect(client.folder_exists?('foo')).to eq(true)
+      expect(client.folder_exists?('foo')).to be(true)
    end
 
    specify do
      allow_any_instance_of(Ethon::Easy).to receive(:response_code).and_return(404)
-      expect(client.folder_exists?('bar')).to eq(false)
+      expect(client.folder_exists?('bar')).to be(false)
    end
  end
 
  describe '#create_folder' do
    it 'does nothing if the folder name is nil' do
-      expect(Ethon::Easy).to_not receive(:new)
-      expect(client.create_folder(nil, 'bar')).to eq(nil)
+      allow(Ethon::Easy).to receive(:new)
+      expect(client.create_folder(nil, 'bar')).to be_nil
+      expect(Ethon::Easy).not_to have_received(:new)
    end
+
    specify do
      mock_responses('request_digest.json')
      expect(client.create_folder('foo', 'bar')).to eq(200)
@@ -287,18 +324,20 @@
  end
 
  describe '#lists' do
+    subject(:lists) { client.lists(site_path, query) }
+
    before { mock_responses('lists.json') }
-    subject { client.lists(site_path, query) }
 
    let(:site_path) { '/sites/APRop' }
    let(:query) { { select: 'Title,Id,Hidden,ItemCount', filter: 'Hidden eq false' } }
 
    it 'returns Hash with expected keys' do
-      expect(subject).to be_a Hash
-      expect(subject[:server_responded_at]).to be_a Time
-      expect(subject[:results]).to be_a Array
+      expect(lists).to be_a Hash
+      expect(lists[:server_responded_at]).to be_a Time
+      expect(lists[:results]).to be_a Array
    end
-    context 'results' do
+
+    describe 'results' do
      let(:results) { subject[:results] }
 
      it 'is not empty' do
@@ -306,11 +345,11 @@
      end
 
      it 'returns lists with filled properties' do
-        is_expected.not_to be_empty
+        expect(lists).not_to be_empty
        sample = results.sample
-        %w(
+        %w[
          hidden id item_count title
-        ).each do |property|
+        ].each do |property|
          expect(sample).to respond_to property
          expect(sample.send(property)).not_to be_nil
        end
@@ -319,11 +358,11 @@
  end
 
  describe '#index_field' do
-    subject { client.index_field('My List', 'Modified', site_path) }
+    subject(:index_field) { client.index_field('My List', 'Modified', site_path) }
 
    let(:site_path) { '/sites/APRop' }
    let(:response) { response_file }
-    let(:response_file) { File.open('spec/fixtures/responses/index_field.json').read }
+    let(:response_file) { File.read('spec/fixtures/responses/index_field.json') }
 
    before do
      allow(client).to receive(:xrequest_digest).and_return('digest')
@@ -338,16 +377,15 @@
    end
 
    it 'updates the indexed field' do
-      expect(subject).to eq(204)
+      expect(index_field).to eq(204)
    end
 
    context 'when the field is already indexed' do
      let(:response) { JSON.parse(response_file).deep_merge('d' => { 'Indexed' => true }).to_json }
 
      it 'returns 304' do
-        expect(subject).to eq(304)
+        expect(index_field).to eq(304)
      end
    end
  end
-
 end
diff --git a/spec/lib/sharepoint/client_spec.rb b/spec/lib/sharepoint/client_spec.rb
index 5076241..145348e 100644
--- a/spec/lib/sharepoint/client_spec.rb
+++ b/spec/lib/sharepoint/client_spec.rb
@@ -1,151 +1,146 @@
+# frozen_string_literal: true
+
 require 'spec_helper'
 
 describe Sharepoint::Client do
-  let(:config) { { username: ENV['SP_USERNAME'],
-                   password: ENV['SP_PASSWORD'],
-                   uri: ENV['SP_URL'] } }
+  let(:config) do
+    { username: ENV.fetch('SP_USERNAME', nil),
+      password: ENV.fetch('SP_PASSWORD', nil),
+      uri: ENV.fetch('SP_URL', nil) }
+  end
 
  describe '#initialize' do
-
-    context 'success' do
-      subject { described_class.new(config) }
+    context 'with success' do
+      subject(:client) { described_class.new(config) }
 
      it 'returns a valid instance' do
-        is_expected.to be_a Sharepoint::Client
+        expect(client).to be_a described_class
      end
 
      it 'sets config object' do
-        client_config = subject.config
+        client_config = client.config
        expect(client_config).to be_a OpenStruct
-        [:username, :password, :url].each do |key|
+        %i[username password url].each do |key|
          value = client_config.send(key)
          expect(value).to eq config[key]
        end
      end
 
-      it "sets base_url in the client" do
-        expect(subject.send :base_url).to eql(ENV['SP_URL'])
+      it 'sets base_url in the client' do
+        expect(client.send(:base_url)).to eql(ENV.fetch('SP_URL', nil))
      end
 
-      it "sets base_api_url in the client" do
-        expect(subject.send :base_api_url).to eql(ENV['SP_URL']+'/_api/')
+      it 'sets base_api_url in the client' do
+        expect(client.send(:base_api_url)).to eql("#{ENV.fetch('SP_URL', nil)}/_api/")
      end
 
-      it "sets base_api_web_url in the client" do
-        expect(subject.send :base_api_web_url).to eql(ENV['SP_URL']+'/_api/web/')
+      it 'sets base_api_web_url in the client' do
+        expect(client.send(:base_api_web_url)).to eql("#{ENV.fetch('SP_URL', nil)}/_api/web/")
      end
    end
 
-    context 'ethon easy options' do
-      context 'success' do
+    context 'with ethon easy options' do
+      context 'with success' do
+        subject(:client) { described_class.new(config_ethon) }
+
        let(:config_ethon) { config.merge({ ethon_easy_options: ssl_verifypeer }) }
        let(:ssl_verifypeer) { { ssl_verifypeer: false } }
-        subject { described_class.new(config_ethon) }
-
-        it "sets ethon easy options in the client" do
-          expect(subject.send :ethon_easy_options).to eql(ssl_verifypeer)
+        it 'sets ethon easy options in the client' do
+          expect(client.send(:ethon_easy_options)).to eql(ssl_verifypeer)
        end
      end
 
-      context 'failure' do
+      context 'with failure' do
        let(:config_ethon) { config.merge({ ethon_easy_options: 'hello' }) }
 
-        it "should raise ethon configuration error for bad config" do
-          expect {
+        it 'raises ethon configuration error for bad config' do
+          expect do
            described_class.new(config_ethon)
-          }.to raise_error(Sharepoint::Errors::EthonOptionsConfigurationError)
+          end.to raise_error(Sharepoint::Errors::EthonOptionsConfigurationError)
        end
      end
    end
 
-    context 'failure' do
-
-      context "bad username" do
-        [{ value: nil, name: 'nil' },
-         { value: '', name: 'blank' },
-         { value: 344, name: 344 } ].each do |ocurrence|
-
-          it "should raise username configuration error for #{ ocurrence[:name]} username" do
+    context 'with failure' do
+      context 'with bad username' do
+        [{ value: nil, name: 'nil' },
+         { value: '', name: 'blank' },
+         { value: 344, name: 344 }].each do |ocurrence|
+          it "raises username configuration error for #{ocurrence[:name]} username" do
            wrong_config = config
            wrong_config[:username] = ocurrence[:value]
 
-            expect {
-              described_class.new(wrong_config)
-            }.to raise_error(Sharepoint::Errors::UsernameConfigurationError)
+            expect do
+              described_class.new(wrong_config)
+            end.to raise_error(Sharepoint::Errors::UsernameConfigurationError)
          end
        end
      end
 
-      context "bad password" do
-        [{ value: nil, name: 'nil' },
-         { value: '', name: 'blank' },
-         { value: 344, name: 344 } ].each do |ocurrence|
-
-          it "should raise password configuration error for #{ocurrence[:name]} password" do
+      context 'with bad password' do
+        [{ value: nil, name: 'nil' },
+         { value: '', name: 'blank' },
+         { value: 344, name: 344 }].each do |ocurrence|
+          it "raises password configuration error for #{ocurrence[:name]} password" do
            wrong_config = config
            wrong_config[:password] = ocurrence[:value]
 
-            expect {
              described_class.new(wrong_config)
-            }.to raise_error(Sharepoint::Errors::PasswordConfigurationError)
+            expect do
+            end.to raise_error(Sharepoint::Errors::PasswordConfigurationError)
          end
        end
      end
 
-      context "bad uri" do
-        [{ value: nil, name: 'nil' },
-         { value: '', name: 'blank' },
-         { value: 344, name: 344 },
-         { value: 'ftp://www.test.com', name: "invalid uri" }].each do |ocurrence|
-
-          it "should raise uri configuration error for #{ocurrence[:name]} uri" do
+      context 'with bad uri' do
+        [{ value: nil, name: 'nil' },
+         { value: '', name: 'blank' },
+         { value: 344, name: 344 },
+         { value: 'ftp://www.test.com', name: 'invalid uri' }].each do |ocurrence|
+          it "raises uri configuration error for #{ocurrence[:name]} uri" do
            wrong_config = config
            wrong_config[:uri] = ocurrence[:value]
 
-            expect {
+            expect do
              described_class.new(wrong_config)
-            }.to raise_error(Sharepoint::Errors::UriConfigurationError)
+            end.to raise_error(Sharepoint::Errors::UriConfigurationError)
          end
        end
-
      end
-
    end
-  end
  end
 
  describe '#remove_double_slashes' do
-    PAIRS = {
-      'foobar'             => 'foobar',
-      'foo/bar'            => 'foo/bar',
-      'foo/bar/'           => 'foo/bar/',
-      'http://foo/bar//'   => 'http://foo/bar/',
-      'https://foo/bar//'  => 'https://foo/bar/',
-      'https://foo/bar'    => 'https://foo/bar',
+    {
+      'foobar' => 'foobar',
+      'foo/bar' => 'foo/bar',
+      'foo/bar/' => 'foo/bar/',
+      'http://foo/bar//' => 'http://foo/bar/',
+      'https://foo/bar//' => 'https://foo/bar/',
+      'https://foo/bar' => 'https://foo/bar',
      'https://foo//bar//' => 'https://foo/bar/'
    }.each do |input, output|
      specify do
-        expect(described_class.new(config).send :remove_double_slashes, input).to eq(output)
+        expect(described_class.new(config).send(:remove_double_slashes, input)).to eq(output)
      end
    end
  end
 
  {
-    '[]'                                                    => '%5B%5D',
+    '[]' => '%5B%5D',
    "https://example.org/sites/Method('/file+name .pdf')" => "https://example.org/sites/Method('/file+name%20.pdf')"
  }.each do |input, output|
    describe '#uri_escape' do
      specify do
-        expect(described_class.new(config).send :uri_escape, input).to eq(output)
+        expect(described_class.new(config).send(:uri_escape, input)).to eq(output)
      end
    end
 
    describe '#uri_unescape' do
      specify do
-        expect(described_class.new(config).send :uri_unescape, output).to eq(input)
+        expect(described_class.new(config).send(:uri_unescape, output)).to eq(input)
      end
    end
  end
-
 end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 9aa5367..44f6ef4 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 if ENV['RCOV'] || ENV['COVERAGE']
   require 'simplecov'
 
@@ -17,18 +19,18 @@
 
 SPEC_BASE = Pathname.new(__FILE__).realpath.parent
 
-$: << SPEC_BASE.parent + 'lib'
+$LOAD_PATH << ("#{SPEC_BASE.parent}lib")
 
 require 'sharepoint'
 
-def fixture name
-  SPEC_BASE + 'fixtures' + name
+def fixture(name)
+  "#{SPEC_BASE}fixtures#{name}"
 end
 
 # Requires supporting ruby files with custom matchers and macros, etc,
 # # in spec/support/ and its subdirectories.
-Dir[File.join(SPEC_BASE, "support/**/*.rb")].each { |f| require f }
+Dir[File.join(SPEC_BASE, 'support/**/*.rb')].sort.each { |f| require f }
 
-RSpec::configure do |rspec|
+RSpec.configure do |rspec|
   rspec.tty = true
   rspec.color = true
   rspec.include Sharepoint::SpecHelpers