Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix lint issue C0301 #855

Merged
merged 10 commits into from
Jan 13, 2020
3 changes: 2 additions & 1 deletion esrally/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,8 @@ def __init__(self, hosts, client_options):

# ssl.Purpose.CLIENT_AUTH allows presenting client certs and can only be enabled during instantiation
# but can be disabled via the verify_mode property later on.
self.ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH, cafile=self.client_options.pop("ca_certs", certifi.where()))
self.ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH,
cafile=self.client_options.pop("ca_certs", certifi.where()))

if not self.client_options.pop("verify_certs", True):
self.logger.info("SSL certificate verification: off")
Expand Down
13 changes: 7 additions & 6 deletions esrally/driver/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,11 +315,11 @@ def __call__(self, es, params):
If ``detailed-results`` is ``True`` the following meta data are returned in addition:

* ``ops``: A hash with the operation name as key (e.g. index, update, delete) and various counts as values. ``item-count`` contains
the total number of items for this key. Additionally, we return a separate counter each result (indicating e.g. the number of created
items, the number of deleted items etc.).
* ``shards_histogram``: An array of hashes where each hash has two keys: ``item-count`` contains the number of items to which a shard
distribution applies and ``shards`` contains another hash with the actual distribution of ``total``, ``successful`` and ``failed``
shards (see examples below).
    the total number of items for this key. Additionally, we return a separate counter for each result (indicating e.g. the number of
    created items, the number of deleted items etc.).
* ``shards_histogram``: An array of hashes where each hash has two keys: ``item-count`` contains the number of items to which a
shard distribution applies and ``shards`` contains another hash with the actual distribution of ``total``, ``successful`` and
``failed`` shards (see examples below).
* ``bulk-request-size-bytes``: Total size of the bulk request body in bytes.
* ``total-document-size-bytes``: Total size of all documents within the bulk request body in bytes.

Expand Down Expand Up @@ -919,7 +919,8 @@ def __repr__(self, *args, **kwargs):

class DeleteIndexTemplate(Runner):
"""
Execute the `delete index template API <https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#delete>`_.
Execute the `delete index template API
<https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#delete>`_.
"""

def __call__(self, es, params):
Expand Down
7 changes: 4 additions & 3 deletions esrally/mechanic/provisioner.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def local(cfg, car, plugins, cluster_settings, ip, http_port, all_node_ips, all_
node_root_dir = os.path.join(target_root, node_name)

_, java_home = java_resolver.java_home(car.mandatory_var("runtime.jdk"), cfg)

es_installer = ElasticsearchInstaller(car, java_home, node_name, node_root_dir, all_node_ips, all_node_names, ip, http_port)
plugin_installers = [PluginInstaller(plugin, java_home) for plugin in plugins]

Expand Down Expand Up @@ -227,12 +227,13 @@ def _provisioner_variables(self):
provisioner_vars.update(self.es_installer.variables)
provisioner_vars.update(plugin_variables)
provisioner_vars["cluster_settings"] = cluster_settings

return provisioner_vars


class ElasticsearchInstaller:
def __init__(self, car, java_home, node_name, node_root_dir, all_node_ips, all_node_names, ip, http_port, hook_handler_class=team.BootstrapHookHandler):
def __init__(self, car, java_home, node_name, node_root_dir, all_node_ips, all_node_names, ip, http_port,
hook_handler_class=team.BootstrapHookHandler):
self.car = car
self.java_home = java_home
self.node_name = node_name
Expand Down
7 changes: 4 additions & 3 deletions esrally/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -1196,12 +1196,13 @@ def format_dict(d):

races = []
for race in race_store(cfg).list():
races.append([race.race_id, time.to_iso8601(race.race_timestamp), race.track, format_dict(race.track_params), race.challenge_name, race.car_name,
format_dict(race.user_tags), race.track_revision, race.team_revision])
races.append([race.race_id, time.to_iso8601(race.race_timestamp), race.track, format_dict(race.track_params), race.challenge_name,
race.car_name, format_dict(race.user_tags), race.track_revision, race.team_revision])

if len(races) > 0:
console.println("\nRecent races:\n")
console.println(tabulate.tabulate(races, headers=["Race ID", "Race Timestamp", "Track", "Track Parameters", "Challenge", "Car", "User Tags", "Track Revision", "Team Revision"]))
console.println(tabulate.tabulate(races, headers=["Race ID", "Race Timestamp", "Track", "Track Parameters", "Challenge", "Car",
"User Tags", "Track Revision", "Team Revision"]))
else:
console.println("")
console.println("No recent races found.")
Expand Down
8 changes: 5 additions & 3 deletions esrally/rally.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ def positive_number(v):
"--distribution-repository",
help="Define the repository from where the Elasticsearch distribution should be downloaded (default: release).",
default="release")

task_filter_group = p.add_mutually_exclusive_group()
task_filter_group.add_argument(
"--include-tasks",
Expand Down Expand Up @@ -658,7 +658,8 @@ def with_actor_system(runnable, cfg):
logger.warning("User interrupted shutdown of internal actor system.")
console.info("Please wait a moment for Rally's internal components to shutdown.")
if not shutdown_complete and times_interrupted > 0:
logger.warning("Terminating after user has interrupted actor system shutdown explicitly for [%d] times.", times_interrupted)
logger.warning("Terminating after user has interrupted actor system shutdown explicitly for [%d] times.",
times_interrupted)
console.println("")
console.warn("Terminating now at the risk of leaving child processes behind.")
console.println("")
Expand All @@ -667,7 +668,8 @@ def with_actor_system(runnable, cfg):
console.println(SKULL)
console.println("")
elif not shutdown_complete:
console.warn("Could not terminate all internal processes within timeout. Please check and force-terminate all Rally processes.")
console.warn("Could not terminate all internal processes within timeout. Please check and force-terminate "
"all Rally processes.")


def generate(cfg):
Expand Down
21 changes: 12 additions & 9 deletions esrally/reporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,9 +205,12 @@ def report_total_time(self, name, total_time):
def report_total_time_per_shard(self, name, total_time_per_shard):
unit = "min"
return self.join(
self.line("Min cumulative {} across primary shards".format(name), "", total_time_per_shard.get("min"), unit, convert.ms_to_minutes),
self.line("Median cumulative {} across primary shards".format(name), "", total_time_per_shard.get("median"), unit, convert.ms_to_minutes),
self.line("Max cumulative {} across primary shards".format(name), "", total_time_per_shard.get("max"), unit, convert.ms_to_minutes),
self.line("Min cumulative {} across primary shards".format(name), "", total_time_per_shard.get("min"), unit,
convert.ms_to_minutes),
self.line("Median cumulative {} across primary shards".format(name), "", total_time_per_shard.get("median"),
unit, convert.ms_to_minutes),
self.line("Max cumulative {} across primary shards".format(name), "", total_time_per_shard.get("max"), unit,
convert.ms_to_minutes),
)

def report_total_count(self, name, total_count):
Expand Down Expand Up @@ -474,12 +477,12 @@ def report_total_time(self, name, baseline_total, contender_total):
def report_total_time_per_shard(self, name, baseline_per_shard, contender_per_shard):
unit = "min"
return self.join(
self.line("Min cumulative {} across primary shard".format(name), baseline_per_shard.get("min"), contender_per_shard.get("min"), "", unit,
treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
self.line("Median cumulative {} across primary shard".format(name), baseline_per_shard.get("median"), contender_per_shard.get("median"), "", unit,
treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
self.line("Max cumulative {} across primary shard".format(name), baseline_per_shard.get("max"), contender_per_shard.get("max"), "", unit,
treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
self.line("Min cumulative {} across primary shard".format(name), baseline_per_shard.get("min"),
contender_per_shard.get("min"), "", unit, treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
self.line("Median cumulative {} across primary shard".format(name), baseline_per_shard.get("median"),
contender_per_shard.get("median"), "", unit, treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
self.line("Max cumulative {} across primary shard".format(name), baseline_per_shard.get("max"), contender_per_shard.get("max"),
"", unit, treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
)

def report_total_count(self, name, baseline_total, contender_total):
Expand Down
3 changes: 2 additions & 1 deletion esrally/telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -1049,7 +1049,8 @@ def on_benchmark_stop(self):
self.metrics_store.put_doc(doc=ct, level=metrics.MetaInfoScope.cluster)

self.add_metrics(self.extract_value(p, ["segments", "doc_values_memory_in_bytes"]), "segments_doc_values_memory_in_bytes", "byte")
self.add_metrics(self.extract_value(p, ["segments", "stored_fields_memory_in_bytes"]), "segments_stored_fields_memory_in_bytes", "byte")
self.add_metrics(self.extract_value(p, ["segments", "stored_fields_memory_in_bytes"]), "segments_stored_fields_memory_in_bytes",
"byte")
self.add_metrics(self.extract_value(p, ["segments", "terms_memory_in_bytes"]), "segments_terms_memory_in_bytes", "byte")
self.add_metrics(self.extract_value(p, ["segments", "norms_memory_in_bytes"]), "segments_norms_memory_in_bytes", "byte")
self.add_metrics(self.extract_value(p, ["segments", "points_memory_in_bytes"]), "segments_points_memory_in_bytes", "byte")
Expand Down
4 changes: 3 additions & 1 deletion esrally/track/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@
# specific language governing permissions and limitations
# under the License.

from .loader import list_tracks, track_info, load_track, load_track_plugins, track_repo, prepare_track, operation_parameters, set_absolute_data_path
from .loader import (
list_tracks, track_info, load_track, load_track_plugins, track_repo, prepare_track, operation_parameters, set_absolute_data_path
)

# expose the complete track API
from .track import *
1 change: 1 addition & 0 deletions esrally/utils/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -428,6 +428,7 @@ def guess_java_home(major_version=8, fallback=None, runner=_run, read_symlink=_r
m = debian_jdk_pattern.match(j)
if m:
return m.group(1)
# pylint: disable=line-too-long
# Red Hat based distributions
#
# ls -l /etc/alternatives/jre_1.[789].0
Expand Down
12 changes: 6 additions & 6 deletions tests/client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ def test_create_https_connection_verify_server(self, mocked_load_cert_chain):
mock.call("SSL client authentication: off")
])

assert not mocked_load_cert_chain.called, "ssl_context.load_cert_chain should not have been called as we have not supplied client " \
"certs"
assert not mocked_load_cert_chain.called, "ssl_context.load_cert_chain should not have been called as we have not supplied " \
"client certs"

self.assertEqual(hosts, f.hosts)
self.assertTrue(f.ssl_context.check_hostname)
Expand Down Expand Up @@ -142,8 +142,8 @@ def test_create_https_connection_only_verify_self_signed_server_certificate(self
mock.call("SSL client authentication: off")
])

assert not mocked_load_cert_chain.called, "ssl_context.load_cert_chain should not have been called as we have not supplied client " \
"certs"
assert not mocked_load_cert_chain.called, "ssl_context.load_cert_chain should not have been called as we have not supplied " \
"client certs"
self.assertEqual(hosts, f.hosts)
self.assertTrue(f.ssl_context.check_hostname)
self.assertEqual(ssl.CERT_REQUIRED, f.ssl_context.verify_mode)
Expand Down Expand Up @@ -217,8 +217,8 @@ def test_create_https_connection_unverified_certificate(self, mocked_load_cert_c
mock.call("SSL client authentication: off")
])

assert not mocked_load_cert_chain.called, "ssl_context.load_cert_chain should not have been called as we have not supplied client " \
"certs"
assert not mocked_load_cert_chain.called, "ssl_context.load_cert_chain should not have been called as we have not supplied " \
"client certs"

self.assertEqual(hosts, f.hosts)
self.assertFalse(f.ssl_context.check_hostname)
Expand Down
6 changes: 3 additions & 3 deletions tests/driver/runner_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,7 +328,7 @@ def test_bulk_index_error_no_shards(self, es):
"_type": "doc",
"_id": "1",
"status": 429,
"error": "EsRejectedExecutionException[rejected execution (queue capacity 50) on org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction$PrimaryPhase$1@1]"
"error": "EsRejectedExecutionException[rejected execution (queue capacity 50) on org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction$PrimaryPhase$1@1]" # pylint: disable=line-too-long
}
},
{
Expand All @@ -337,7 +337,7 @@ def test_bulk_index_error_no_shards(self, es):
"_type": "doc",
"_id": "2",
"status": 429,
"error": "EsRejectedExecutionException[rejected execution (queue capacity 50) on org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction$PrimaryPhase$1@2]"
"error": "EsRejectedExecutionException[rejected execution (queue capacity 50) on org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction$PrimaryPhase$1@2]" # pylint: disable=line-too-long
}
},
{
Expand All @@ -346,7 +346,7 @@ def test_bulk_index_error_no_shards(self, es):
"_type": "doc",
"_id": "3",
"status": 429,
"error": "EsRejectedExecutionException[rejected execution (queue capacity 50) on org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction$PrimaryPhase$1@3]"
"error": "EsRejectedExecutionException[rejected execution (queue capacity 50) on org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction$PrimaryPhase$1@3]" # pylint: disable=line-too-long
}
}
]
Expand Down
2 changes: 1 addition & 1 deletion tests/mechanic/launcher_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ def test_env_options_order(self):
self.assertEqual("/java_home/bin" + os.pathsep + os.environ["PATH"], env["PATH"])
self.assertEqual("-XX:+ExitOnOutOfMemoryError -XX:+UnlockDiagnosticVMOptions -XX:+DebugNonSafepoints "
"-XX:+UnlockCommercialFeatures -XX:+FlightRecorder "
"-XX:FlightRecorderOptions=disk=true,maxage=0s,maxsize=0,dumponexit=true,dumponexitpath=/tmp/telemetry/profile.jfr "
"-XX:FlightRecorderOptions=disk=true,maxage=0s,maxsize=0,dumponexit=true,dumponexitpath=/tmp/telemetry/profile.jfr " # pylint: disable=line-too-long
"-XX:StartFlightRecording=defaultrecording=true", env["ES_JAVA_OPTS"])


Expand Down
3 changes: 2 additions & 1 deletion tests/racecontrol_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,8 @@ def test_fails_without_benchmark_only_pipeline_in_docker(self):
self.assertEqual(
"Only the [benchmark-only] pipeline is supported by the Rally Docker image.\n"
"Add --pipeline=benchmark-only in your Rally arguments and try again.\n"
"For more details read the docs for the benchmark-only pipeline in https://esrally.readthedocs.io/en/latest/pipelines.html#benchmark-only\n",
"For more details read the docs for the benchmark-only pipeline in "
"https://esrally.readthedocs.io/en/latest/pipelines.html#benchmark-only\n",
ctx.exception.args[0])
del racecontrol.pipelines[test_pipeline_name]

Expand Down
Loading