Commit
* chaos es scripts
* adding requirements
* numpy
* adding index update
* adding grafana link
* adding grafana link
* taking out script grafana link
* taking out prints
* updating kube burner data sources
* updating index data frame
* only try to find workload in intlab if not intlab to begin with
* adding env index from variable
* adding helper print lines
1 parent 0e14df6 · commit 44384f5 · Showing 8 changed files with 298 additions and 12 deletions.
Two of the changed files are empty.
@@ -0,0 +1,21 @@
import json

import update_es_uuid

# Document ids and search params from earlier one-off runs.
# ids = ["XStsmZEBPJqRxZ0XWz26", "Wyv-kJEBPJqRxZ0XaD35"]
# params = {"_id": "XStsmZEBPJqRxZ0XWz26"}
ids = ["zO95D5EBjXIeP7FHSwnd", "Nip2D5EBPJqRxZ0XAFt9"]
index = "krkn-telemetry"
params = {"run_uuid": "fd1984a4-97da-4ce7-9a28-95b7a8cc8cf9"}
# es_search = update_es_uuid.es_search(params, index=index)[0]

# One-off fixes that can be re-enabled as needed:
# update_es_uuid.delete_key(ids[0], index, "cluster_version")
# update_es_uuid.delete_key(ids[1], index, "cluster_version")
# del es_search['_source']["cluster_version"]
# print('es search 1' + str(es_search))
# update_es_uuid.update_data_to_elasticsearch(es_search['_id'], es_search, index)

# Read a saved document from disk and upload it to the krkn-telemetry index.
with open("run.json", "r") as f:
    es_search = json.loads(f.read())

update_es_uuid.upload_data_to_elasticsearch(es_search, index)
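These scripts import update_es_uuid, which is not part of this diff. Below is a minimal sketch of the interface they appear to rely on; the function names and parameters are taken from the calls in this commit, but the bodies are illustrative assumptions, not the module's actual code.

# Illustrative sketch only -- the real update_es_uuid module is not shown in this commit.
import os
from elasticsearch import Elasticsearch


def _client():
    # Assumes the same ES_USERNAME / ES_PASSWORD / ES_URL convention used elsewhere in this commit.
    url = f"https://{os.getenv('ES_USERNAME')}:{os.getenv('ES_PASSWORD')}@{os.getenv('ES_URL')}:443"
    return Elasticsearch([url])


def es_search(params, must_not=None, index="", size=10, from_pos=0):
    # Build a bool query from match terms plus an optional must_not/exists clause,
    # then return the raw hit dictionaries.
    query = {"bool": {"must": [{"match": {k: v}} for k, v in params.items()]}}
    if must_not:
        query["bool"]["must_not"] = [{"exists": must_not}]
    result = _client().search(index=index, body={"query": query}, size=size, from_=from_pos)
    return result["hits"]["hits"]


def upload_data_to_elasticsearch(item, index):
    # Index a new document.
    return _client().index(index=index, body=item)


def update_data_to_elasticsearch(doc_id, data, index):
    # Partial update of an existing document.
    return _client().update(index=index, id=doc_id, body={"doc": data})


def delete_es_entry(doc_id, index):
    # Delete a document by id.
    return _client().delete(index=index, id=doc_id)


def delete_key(doc_id, index, key):
    # Remove a single field from a document (sketch; via a scripted update).
    return _client().update(index=index, id=doc_id, body={"script": f"ctx._source.remove('{key}')"})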
@@ -0,0 +1,9 @@
argparse>=1.4.0
requests>=2.25.1
pyyaml>=5.4.1
python-jenkins>=1.7.0
numpy==1.24.0
elasticsearch<7.14.0
coloredlogs>=15.0.1
utils
urllib3
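The elasticsearch<7.14.0 pin is worth noting: it is presumably there because 7.14+ clients reject non-Elasticsearch backends (including OpenSearch and the AWS-hosted domain these scripts target). A defensive check like the sketch below, which is illustrative and not part of this commit, can catch an accidental upgrade early.

# Illustrative sketch, not part of this commit: fail fast if the installed
# elasticsearch client is newer than the <7.14.0 pin in the requirements file.
from importlib.metadata import version

es_version = version("elasticsearch")
major, minor = (int(part) for part in es_version.split(".")[:2])
if (major, minor) >= (7, 14):
    raise RuntimeError(f"elasticsearch {es_version} installed; pin below 7.14.0")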
@@ -0,0 +1,48 @@
import time

import update_es_uuid

index = "krkn-telemetry"

# Find AWS runs that are missing a cluster_version field.
params = {"cloud_infrastructure": "AWS"}
must_not = {"field": "cluster_version"}
# Equivalent raw query clause:
# "must_not": [
#     {"exists": {"field": "cluster_version"}}
# ]

i = 0
size = 60
from_pos = 0
while i < 20:
    print("from_pos " + str(from_pos))
    es_search_all = update_es_uuid.es_search(params, must_not=must_not, index=index, size=size, from_pos=from_pos)

    for es_search in es_search_all:
        if "cluster_version" in es_search["_source"]:
            print("continue")
            continue

        # e.g. "Red Hat Enterprise Linux CoreOS 417.94.202410180656-0"
        os_version = es_search["_source"]["node_summary_infos"][0]["os_version"]

        # Take the build token; RHEL strings end in the codename "(Plow)",
        # so fall back to the token before it.
        numbers = os_version.split(" ")[-1]
        if "Plow" in numbers:
            numbers = os_version.split(" ")[-2]
        numbers = numbers.split(".")

        print("os_version " + os_version)

        # Rebuild a nightly-style version string, e.g. 4.17.0-0.nightly-2024-10-21-185738
        version = (
            numbers[0][0] + "." + numbers[0][1:] + ".0-0.nightly-"
            + numbers[2][:4] + "-" + numbers[2][4:6] + "-" + numbers[2][6:8]
            + "-" + numbers[2][8:12] + numbers[2][13:14]
        )
        print("version: " + str(es_search["_id"]) + " " + str(version))

        data_to_update = {"cluster_version": version}
        update_es_uuid.update_data_to_elasticsearch(es_search["_id"], data_to_update, index)
        time.sleep(2)

    i += 1
    from_pos = size * i
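A quick way to sanity-check the slicing above is to run it against the sample build token from the comment; the snippet below is illustrative only, and its output is simply what the expression produces, which is worth comparing against the nightly version strings actually stored in the index.

# Illustrative check: apply the same slicing to the sample os_version build token.
numbers = "417.94.202410180656-0".split(".")
version = (numbers[0][0] + "." + numbers[0][1:] + ".0-0.nightly-"
           + numbers[2][:4] + "-" + numbers[2][4:6] + "-" + numbers[2][6:8]
           + "-" + numbers[2][8:12] + numbers[2][13:14])
print(version)  # 4.17.0-0.nightly-2024-10-18-06560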
@@ -0,0 +1,71 @@
import os
import time

from elasticsearch import Elasticsearch

import update_es_uuid

# elasticsearch constants
ES_URL = 'search-ocp-qe-perf-scale-test-elk-hcm7wtsqpxy7xogbu72bor4uve.us-east-1.es.amazonaws.com'
ES_USERNAME = os.getenv('ES_USERNAME')
ES_PASSWORD = os.getenv('ES_PASSWORD')


def update_data_to_elasticsearch(params, index, new_index):
    '''Copies documents matching params from index to new_index, then removes
    them from the original index.
    '''
    start = time.time()
    matched_docs = update_es_uuid.es_search(params, index=index, size=30, from_pos=0)

    print('matched docs: ' + str(len(matched_docs)))
    for item in matched_docs:
        # Only upload if this uuid is not already present in the new index.
        param_uuid = {"uuid": item['_source']['uuid']}
        found_uuid = update_es_uuid.es_search(param_uuid, index=new_index)
        print('uuid ' + str(item))
        if len(found_uuid) == 0:
            print('uuid not yet in ' + new_index)
            response = upload_data_to_elasticsearch(item["_source"], new_index)
            print(f"Response back was {response}")
        # Remove the document from the old index.
        update_es_uuid.delete_es_entry(item['_id'], index)

    end = time.time()
    elapsed_time = end - start

    # return elapsed time for the copy if no issues
    return elapsed_time


def upload_data_to_elasticsearch(item, index):
    '''Uploads a single document to the given Elasticsearch index.
    '''
    # create Elasticsearch object and attempt index
    es = Elasticsearch(
        [f'https://{ES_USERNAME}:{ES_PASSWORD}@{ES_URL}:443']
    )

    start = time.time()
    print(f"Uploading item {item} to index {index} in Elasticsearch")
    response = es.index(
        index=index,
        body=item
    )
    print(f"Response back was {response}")

    end = time.time()
    elapsed_time = end - start

    # return elapsed time for upload if no issues
    return elapsed_time


# Earlier one-off calls (these used an older two-argument signature):
# to_update = {"profile": "IPI-on-AWS.install.yaml"}
# update_data_to_elasticsearch("2l41vYYBRpj_T8Zagru2", to_update)
# update_data_to_elasticsearch("4F41vYYBRpj_T8ZahLvF", to_update)
# update_data_to_elasticsearch("7F41vYYBRpj_T8ZairsN", to_update)
# update_data_to_elasticsearch("5F41vYYBRpj_T8Zahrtk", to_update)

# Move ovn-live-migration documents out of the shared kube-burner index into
# their own index.
params = {"workload": 'ovn-live-migration'}
new_index = "ovn-live-migration"
old_index = "ripsaw-kube-burner-000020"
update_data_to_elasticsearch(params, old_index, new_index)
# delete_es_entry("5F41vYYBRpj_T8Zahrtk")
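A move like the one triggered at the bottom of this file can be sanity-checked by counting matching documents in the source and destination indices before and after the run. The sketch below is illustrative only; it reuses this file's credentials convention and index names but is not part of the commit.

# Illustrative check, not part of this commit: count matching docs on both sides.
import os
from elasticsearch import Elasticsearch

ES_URL = 'search-ocp-qe-perf-scale-test-elk-hcm7wtsqpxy7xogbu72bor4uve.us-east-1.es.amazonaws.com'
es = Elasticsearch([f"https://{os.getenv('ES_USERNAME')}:{os.getenv('ES_PASSWORD')}@{ES_URL}:443"])

query = {"query": {"match": {"workload": "ovn-live-migration"}}}
for idx in ("ripsaw-kube-burner-000020", "ovn-live-migration"):
    print(idx, es.count(index=idx, body=query)["count"])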
@@ -0,0 +1,109 @@
#!/usr/bin/env python3

import os

from es_scripts import update_es_uuid


def find_workload_type(current_run_uuid):
    search_params = {
        "uuid": current_run_uuid
    }

    index = os.getenv("es_metadata_index")

    hits = update_es_uuid.es_search(search_params, index=index)
    print('hits ' + str(hits))
    if len(hits) <= 0:
        workload_type = find_workload_type_sub(current_run_uuid)
        print('workload type ' + str(workload_type))
        # Only fall back to the internal ES if we are not already pointed at it.
        if workload_type == "Unknown" and "intlab" not in os.environ.get("ES_URL"):
            es_metadata_index = "ospst-perf-scale-ci*"
            if os.getenv("ES_USERNAME_INTERNAL") is not None and os.getenv("ES_PASSWORD_INTERNAL") is not None:
                os.environ["ES_USERNAME"] = os.getenv("ES_USERNAME_INTERNAL", None)
                os.environ["ES_PASSWORD"] = os.getenv("ES_PASSWORD_INTERNAL", None)
                # try finding in internal es
                ES_URL = os.environ["ES_URL"] = "https://opensearch.app.intlab.redhat.com"

                hits = update_es_uuid.es_search_url(search_params, es_url=ES_URL, es_pass=os.getenv("ES_PASSWORD_INTERNAL"), es_user=os.getenv("ES_USERNAME_INTERNAL"), index=es_metadata_index)
                print('hits ' + str(hits))
            else:
                print("internal username and password not set")

    if len(hits) == 0:
        print("No data entry was found for that UUID")
        return "Not Found"
    return hits[0]['_source']


def find_workload_type_sub(current_run_uuid):
    search_params = {
        "uuid": current_run_uuid
    }

    # Map each workload type to the index pattern its results land in.
    if "intlab" in os.environ.get("ES_URL"):
        workload_index_map = {"kube-burner": "ospst-ripsaw-kube-burner*", "ingress-perf": "ospst-ingress-perf*", "network-perf-v2": "ospst-k8s-netperf*"}
    else:
        workload_index_map = {"kube-burner": "ripsaw-kube-burner*", "ingress-perf": "ingress-perf*", "network-perf-v2": "k8s-netperf*", "router-perf": "router-test-results"}
    for k, v in workload_index_map.items():
        hits = update_es_uuid.es_search(search_params, index=v)
        print('hits extra ' + str(hits))
        if len(hits) > 0:
            return k
    return "Unknown"


def get_graphana():

    baseline_uuid = os.environ.get("BASELINE_UUID")

    uuid = os.environ.get("UUID")
    workload_details = find_workload_type(uuid)
    if workload_details != "Not Found":
        workload = workload_details["benchmark"]
        uuid_str = "&var-uuid=" + uuid
        baseline_workload_details = []
        if baseline_uuid != "" and baseline_uuid is not None:
            for baseline in baseline_uuid.split(","):
                uuid_str += "&var-uuid=" + baseline
                baseline_workload_details.append(find_workload_type(baseline))

        worker_count = f"&var-workerNodesCount={workload_details['workerNodesCount']}"
        # data source for public dev es
        # might want to be able to loop through multiple baseline uuids if more than one is passed
        major_version = "&var-ocpMajorVersion=" + str(workload_details['releaseStream'][:4])
        for baseline_details in baseline_workload_details:
            if baseline_details['releaseStream'][:4] not in major_version:
                major_version += "&var-ocpMajorVersion=" + str(baseline_details['releaseStream'][:4])
        grafana_url_ending = f"{worker_count}&from=now-1y&to=now&var-platform=AWS&var-platform=Azure&var-platform=GCP&var-platform=IBMCloud&var-platform=AlibabaCloud&var-platform=VSphere&var-platform=rosa&var-clusterType=rosa&var-clusterType=self-managed"
        if workload == "ingress-perf":
            if "intlab" in os.environ.get("ES_URL"):
                data_source = "be0f4aff-4122-43cf-95dd-fd51c012a208"
            else:
                data_source = "beefdfd9-800e-430c-afef-383032aa2d1f"

            grafana_url_ending += f"&var-infraNodesType={workload_details['infraNodesType']}"
            print(f"grafana url https://grafana.rdu2.scalelab.redhat.com:3000/d/d6105ff8-bc26-4d64-951e-56da771b703d/ingress-perf?orgId=1&var-datasource=beefdfd9-800e-430c-afef-383032aa2d1f&var-Datasource={data_source}{uuid_str}=&var-termination=edge&var-termination=http&var-termination=passthrough&var-termination=reencrypt&var-latency_metric=avg_lat_us&var-compare_by=uuid.keyword{major_version}{grafana_url_ending}")
            print(f"grafana report mode link: https://grafana.rdu2.scalelab.redhat.com:3000/d/df906760-b4c0-44cc-9ecb-586cf39f9bab/ingress-perf-v2-report-mode?orgId=1&var-datasource={data_source}{uuid_str}&var-ocpMajorVersion=All&var-uuid=&var-termination=All&var-latency_metric=avg_lat_us&var-compare_by=ocpMajorVersion.keyword&var-all_uuids=All{grafana_url_ending}")
        elif workload == "k8s-netperf" or workload == "network-perf-v2":
            if "intlab" in os.environ.get("ES_URL"):
                data_source = 'abc72863-3b49-47d5-98d1-357a9559afea'
            else:
                data_source = "rKPTw9UVz"
            print(f"grafana url https://grafana.rdu2.scalelab.redhat.com:3000/d/wINGhybVz/k8s-netperf?orgId=1&var-datasource={data_source}{uuid_str}&var-termination=edge&var-termination=http&var-termination=passthrough&var-termination=reencrypt&var-latency_metric=avg_lat_us&var-compare_by=uuid.keyword&var-workerNodesCount=9&from=now-1y&to=now&var-platform=All&var-clusterType=rosa&var-clusterType=self-managed&var-workerNodesType=All&var-hostNetwork=All&var-service=All&var-parallelism=All&var-throughput_profile=All&var-latency_profile=All&var-messageSize=All&var-driver=netperf")
        else:
            if "intlab" in os.environ.get("ES_URL"):
                data_source = "ab3f14e6-a50f-4d52-93fa-a5076794f864"
            else:
                data_source = "QzcDu7T4z"
            print(f"grafana url https://grafana.rdu2.scalelab.redhat.com:3000/d/g4dJlkBnz3/kube-burner-compare?orgId=1&var-Datasource={data_source}&var-sdn=OVNKubernetes&var-workload={workload}&var-latencyPercentile=P99&var-condition=Ready&var-component=kube-apiserver{uuid_str}{grafana_url_ending}")

            print(f"grafana report mode link: https://grafana.rdu2.scalelab.redhat.com:3000/d/D5E8c5XVz/kube-burner-report-mode?orgId=1&var-Datasource={data_source}&var-sdn=OVNKubernetes&var-clusterType=rosa&var-clusterType=self-managed&var-job={workload}{major_version}&var-compare_by=metadata.ocpMajorVersion&var-component=kube-apiserver&var-component=kube-controller-manager&var-node_roles=masters&var-node_roles=workers&var-node_roles=infra&to=now{uuid_str}{grafana_url_ending}")


get_graphana()
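This script is driven entirely by environment variables. A minimal, illustrative setup for a local run is sketched below; the variable names are the ones read above, while the values are placeholders rather than real endpoints, indices, or run ids.

# Illustrative placeholders only -- substitute a real ES endpoint, metadata index
# pattern, credentials, and run UUIDs before calling get_graphana().
import os

os.environ["ES_URL"] = "https://example-es-endpoint.example.com"   # hypothetical endpoint
os.environ["ES_USERNAME"] = "example-user"                          # hypothetical credentials
os.environ["ES_PASSWORD"] = "example-password"
os.environ["es_metadata_index"] = "perf-scale-ci*"                  # hypothetical index pattern
os.environ["UUID"] = "00000000-0000-0000-0000-000000000000"         # run to look up
os.environ["BASELINE_UUID"] = ""                                    # optional comma-separated baseline runs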